# Threshold used to binarize Falcon Perception mask logits.
# NOTE(review): accepted by the model entrypoints but not applied anywhere
# visible in this change — confirm whether it is consumed downstream.
INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MASK_THRESHOLD = get_float_from_env(
    variable_name="INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MASK_THRESHOLD",
    default=0.3,
)
# Cap on generated tokens per prompt (passed to engine.generate as max_new_tokens).
INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MAX_NEW_TOKENS = get_integer_from_env(
    variable_name="INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MAX_NEW_TOKENS",
    default=2048,
)
# Lower bound on image dimension during pre-processing (min_dimension).
INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MIN_IMAGE_SIZE = get_integer_from_env(
    variable_name="INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MIN_IMAGE_SIZE",
    default=256,
)
# Upper bound on image dimension during pre-processing (max_dimension).
INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MAX_IMAGE_SIZE = get_integer_from_env(
    variable_name="INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MAX_IMAGE_SIZE",
    default=1024,
)
from dataclasses import dataclass
from threading import Lock
from typing import Any, Dict, List, Optional, Tuple, Union

import numpy as np
import torch

from inference_models.configuration import (
    DEFAULT_DEVICE,
    INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MASK_THRESHOLD,
    INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MAX_IMAGE_SIZE,
    INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MAX_NEW_TOKENS,
    INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MIN_IMAGE_SIZE,
)
from inference_models.entities import ColorFormat, ImageDimensions
from inference_models.errors import ModelInputError
from inference_models.models.base.instance_segmentation import (
    InstanceDetections,
    InstanceSegmentationModel,
)
from inference_models.models.base.object_detection import (
    Detections,
    OpenVocabularyObjectDetectionModel,
)


@dataclass
class _PreProcessedInputs:
    """Container for batch-processed inputs ready for the model."""

    # Tensors/values keyed by model argument name; currently left empty by
    # pre_process — the heavy lifting happens in forward().
    batch_inputs: Dict[str, Any]
    original_images: List[Any]  # PIL images for mask finalization


@dataclass
class _PreProcessingMetadata:
    """Metadata from pre-processing needed for post-processing."""

    # One entry per input image, original (pre-resize) height/width.
    image_dimensions: List[ImageDimensions]
    # Text prompts ("classes") attached to the request.
    prompts: List[str]
    # Task name, e.g. "detection" or "segmentation".
    task: str


@dataclass
class PromptResult:
    """Result for a single text prompt."""

    # NOTE(review): this dataclass is not referenced elsewhere in this module
    # — confirm whether it is part of the public API or dead code.
    prompt: str
    present: bool
    xyxy: torch.Tensor  # (n_instances, 4)
    confidence: torch.Tensor  # (n_instances,)
    mask: Optional[torch.Tensor] = None  # (n_instances, H, W) if segmentation


class FalconPerceptionForObjectDetectionTorch(
    OpenVocabularyObjectDetectionModel[
        _PreProcessedInputs,
        _PreProcessingMetadata,
        List[List[Any]],
    ]
):
    """Falcon Perception model for open-vocabulary object detection (Torch backend).

    This wraps the ``falcon-perception`` package's batch inference engine
    to provide bounding-box detection from natural language text prompts.
    """

    @classmethod
    def from_pretrained(
        cls,
        model_name_or_path: str,
        device: torch.device = DEFAULT_DEVICE,
        dtype: str = "float32",
        compile: bool = False,
        **kwargs,
    ) -> "FalconPerceptionForObjectDetectionTorch":
        # Lazy import keeps the optional falcon-perception extra out of the
        # import path for users who never load this model.
        from falcon_perception import load_and_prepare_model, setup_torch_config

        setup_torch_config()
        model, tokenizer, model_args = load_and_prepare_model(
            hf_local_dir=model_name_or_path,
            device=str(device),
            dtype=dtype,
            compile=compile,
        )
        return cls(
            model=model,
            tokenizer=tokenizer,
            model_args=model_args,
            device=device,
        )

    def __init__(
        self,
        model: Any,
        tokenizer: Any,
        model_args: Any,
        device: torch.device,
    ):
        self._model = model
        self._tokenizer = tokenizer
        self._model_args = model_args
        self._device = device
        # Serializes generate() calls; presumably the underlying engine is not
        # thread-safe — NOTE(review): confirm against falcon_perception docs.
        self._lock = Lock()

    def pre_process(
        self,
        images: Union[torch.Tensor, List[torch.Tensor], np.ndarray, List[np.ndarray]],
        input_color_format: Optional[ColorFormat] = None,
        **kwargs,
    ) -> Tuple[_PreProcessedInputs, _PreProcessingMetadata]:
        """Convert inputs to PIL images and record per-image metadata.

        No tensor batching happens here; images are resized/tokenized lazily
        in forward().
        """
        # NOTE(review): this import appears unused — _to_pil_images performs
        # its own PIL import.
        from PIL import Image

        pil_images = _to_pil_images(images, input_color_format)
        image_dimensions = [
            ImageDimensions(height=img.height, width=img.width) for img in pil_images
        ]
        prompts = kwargs.get("classes", [])
        if isinstance(prompts, str):
            prompts = [prompts]
        task = kwargs.get("task", "detection")
        return (
            _PreProcessedInputs(batch_inputs={}, original_images=pil_images),
            _PreProcessingMetadata(
                image_dimensions=image_dimensions,
                prompts=prompts,
                task=task,
            ),
        )

    def forward(
        self,
        pre_processed_images: _PreProcessedInputs,
        classes: List[str],
        task: str = "detection",
        max_new_tokens: int = INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MAX_NEW_TOKENS,
        min_image_size: int = INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MIN_IMAGE_SIZE,
        max_image_size: int = INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MAX_IMAGE_SIZE,
        mask_threshold: float = INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MASK_THRESHOLD,
        **kwargs,
    ) -> List[List[Any]]:
        """Run one generate() call per (image, prompt) pair.

        Returns, per image, a list of ``(prompt_text, aux_output_or_None)``
        tuples in prompt order. Note: ``mask_threshold`` is accepted for
        signature symmetry but not used in this method.
        """
        from falcon_perception import build_prompt_for_task
        from falcon_perception.batch_inference import (
            BatchInferenceEngine,
            process_batch_and_generate,
        )

        pil_images = pre_processed_images.original_images
        if isinstance(classes, str):
            classes = [classes]
        all_results: List[List[Any]] = []

        # Engine is rebuilt per call; presumably cheap — NOTE(review): confirm.
        engine = BatchInferenceEngine(self._model, self._tokenizer)
        stop_token_ids = [
            self._tokenizer.eos_token_id,
            self._tokenizer.end_of_query_token_id,
        ]

        with self._lock, torch.inference_mode():
            for pil_image in pil_images:
                per_image_results = []
                for prompt_text in classes:
                    prompt = build_prompt_for_task(prompt_text, task)
                    batch_inputs = process_batch_and_generate(
                        self._tokenizer,
                        [(pil_image, prompt)],
                        max_length=4096,
                        min_dimension=min_image_size,
                        max_dimension=max_image_size,
                    )
                    # Move only tensor entries to the model device.
                    batch_inputs = {
                        k: (v.to(self._device) if torch.is_tensor(v) else v)
                        for k, v in batch_inputs.items()
                    }
                    # temperature=0.0 + fixed seed: deterministic decoding,
                    # presumably greedy — NOTE(review): confirm semantics.
                    _, aux_out = engine.generate(
                        **batch_inputs,
                        max_new_tokens=max_new_tokens,
                        temperature=0.0,
                        stop_token_ids=stop_token_ids,
                        seed=42,
                        task=task,
                    )
                    per_image_results.append(
                        (prompt_text, aux_out[0] if aux_out else None)
                    )
                all_results.append(per_image_results)
        return all_results

    def post_process(
        self,
        model_results: List[List[Any]],
        pre_processing_meta: _PreProcessingMetadata,
        **kwargs,
    ) -> List[Detections]:
        """Convert per-prompt aux outputs into one Detections per image.

        ``class_id`` is the index of the prompt within the submitted classes
        list. Images with no boxes yield empty (0, 4)/(0,)-shaped tensors.
        """
        results = []
        for image_idx, per_image_results in enumerate(model_results):
            dims = pre_processing_meta.image_dimensions[image_idx]
            all_xyxy = []
            all_confidence = []
            all_class_ids = []

            for prompt_idx, (prompt_text, aux) in enumerate(per_image_results):
                if aux is None:
                    continue
                boxes = _extract_boxes_from_aux(aux, dims.width, dims.height)
                for box in boxes:
                    all_xyxy.append(box["xyxy"])
                    all_confidence.append(box["confidence"])
                    all_class_ids.append(prompt_idx)

            if all_xyxy:
                results.append(
                    Detections(
                        xyxy=torch.tensor(all_xyxy, dtype=torch.float32),
                        confidence=torch.tensor(all_confidence, dtype=torch.float32),
                        class_id=torch.tensor(all_class_ids, dtype=torch.int64),
                    )
                )
            else:
                results.append(
                    Detections(
                        xyxy=torch.zeros((0, 4), dtype=torch.float32),
                        confidence=torch.zeros((0,), dtype=torch.float32),
                        class_id=torch.zeros((0,), dtype=torch.int64),
                    )
                )
        return results


class FalconPerceptionForInstanceSegmentationTorch(
    InstanceSegmentationModel[
        _PreProcessedInputs,
        _PreProcessingMetadata,
        List[List[Any]],
    ]
):
    """Falcon Perception model for open-vocabulary instance segmentation (Torch backend).

    This wraps the ``falcon-perception`` package's batch inference engine
    to provide bounding-box detection + binary masks from natural language
    text prompts.
    """

    @classmethod
    def from_pretrained(
        cls,
        model_name_or_path: str,
        device: torch.device = DEFAULT_DEVICE,
        dtype: str = "float32",
        compile: bool = False,
        **kwargs,
    ) -> "FalconPerceptionForInstanceSegmentationTorch":
        # Same loading path as the detection wrapper above.
        from falcon_perception import load_and_prepare_model, setup_torch_config

        setup_torch_config()
        model, tokenizer, model_args = load_and_prepare_model(
            hf_local_dir=model_name_or_path,
            device=str(device),
            dtype=dtype,
            compile=compile,
        )
        return cls(
            model=model,
            tokenizer=tokenizer,
            model_args=model_args,
            device=device,
        )

    def __init__(
        self,
        model: Any,
        tokenizer: Any,
        model_args: Any,
        device: torch.device,
    ):
        self._model = model
        self._tokenizer = tokenizer
        self._model_args = model_args
        self._device = device
        # See detection wrapper: serializes engine access.
        self._lock = Lock()

    @property
    def class_names(self) -> List[str]:
        # Open-vocabulary model: no fixed class list.
        return []

    def infer(
        self,
        images: Union[torch.Tensor, List[torch.Tensor], np.ndarray, List[np.ndarray]],
        prompts: Optional[List[str]] = None,
        task: str = "segmentation",
        **kwargs,
    ) -> List[InstanceDetections]:
        """End-to-end convenience entry point: pre-process, forward, post-process.

        NOTE(review): mutates the received ``kwargs`` dict in place before
        fanning it out to the three stages.
        """
        kwargs["classes"] = prompts or []
        kwargs["task"] = task
        pre_processed, meta = self.pre_process(images, **kwargs)
        raw = self.forward(pre_processed, **kwargs)
        return self.post_process(raw, meta, **kwargs)

    def pre_process(
        self,
        images: Union[torch.Tensor, List[torch.Tensor], np.ndarray, List[np.ndarray]],
        input_color_format: Optional[ColorFormat] = None,
        **kwargs,
    ) -> Tuple[_PreProcessedInputs, _PreProcessingMetadata]:
        """Convert inputs to PIL images and record per-image metadata."""
        # NOTE(review): this import appears unused — _to_pil_images performs
        # its own PIL import.
        from PIL import Image

        pil_images = _to_pil_images(images, input_color_format)
        image_dimensions = [
            ImageDimensions(height=img.height, width=img.width) for img in pil_images
        ]
        prompts = kwargs.get("classes", [])
        if isinstance(prompts, str):
            prompts = [prompts]
        task = kwargs.get("task", "segmentation")
        return (
            _PreProcessedInputs(batch_inputs={}, original_images=pil_images),
            _PreProcessingMetadata(
                image_dimensions=image_dimensions,
                prompts=prompts,
                task=task,
            ),
        )

    def forward(
        self,
        pre_processed_images: _PreProcessedInputs,
        task: str = "segmentation",
        max_new_tokens: int = INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MAX_NEW_TOKENS,
        min_image_size: int = INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MIN_IMAGE_SIZE,
        max_image_size: int = INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MAX_IMAGE_SIZE,
        mask_threshold: float = INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MASK_THRESHOLD,
        **kwargs,
    ) -> List[List[Any]]:
        """Run one generate() call per (image, prompt) pair.

        Unlike the detection wrapper, prompts are read from ``kwargs`` under
        the "classes" key rather than a named parameter. ``mask_threshold``
        is accepted but not used here.
        """
        from falcon_perception import build_prompt_for_task
        from falcon_perception.batch_inference import (
            BatchInferenceEngine,
            process_batch_and_generate,
        )

        pil_images = pre_processed_images.original_images
        classes = kwargs.get("classes", [])
        if isinstance(classes, str):
            classes = [classes]
        all_results: List[List[Any]] = []

        engine = BatchInferenceEngine(self._model, self._tokenizer)
        stop_token_ids = [
            self._tokenizer.eos_token_id,
            self._tokenizer.end_of_query_token_id,
        ]

        with self._lock, torch.inference_mode():
            for pil_image in pil_images:
                per_image_results = []
                for prompt_text in classes:
                    prompt = build_prompt_for_task(prompt_text, task)
                    batch_inputs = process_batch_and_generate(
                        self._tokenizer,
                        [(pil_image, prompt)],
                        max_length=4096,
                        min_dimension=min_image_size,
                        max_dimension=max_image_size,
                    )
                    batch_inputs = {
                        k: (v.to(self._device) if torch.is_tensor(v) else v)
                        for k, v in batch_inputs.items()
                    }
                    _, aux_out = engine.generate(
                        **batch_inputs,
                        max_new_tokens=max_new_tokens,
                        temperature=0.0,
                        stop_token_ids=stop_token_ids,
                        seed=42,
                        task=task,
                    )
                    per_image_results.append(
                        (prompt_text, aux_out[0] if aux_out else None)
                    )
                all_results.append(per_image_results)
        return all_results

    def post_process(
        self,
        model_results: List[List[Any]],
        pre_processing_meta: _PreProcessingMetadata,
        mask_threshold: float = INFERENCE_MODELS_FALCON_PERCEPTION_DEFAULT_MASK_THRESHOLD,
        **kwargs,
    ) -> List[InstanceDetections]:
        """Convert per-prompt aux outputs into one InstanceDetections per image.

        Boxes without a matching decoded mask get an all-zero mask so the
        mask stack stays aligned with the box tensor. ``mask_threshold`` is
        accepted but not used in this method.
        """
        results = []
        for image_idx, per_image_results in enumerate(model_results):
            dims = pre_processing_meta.image_dimensions[image_idx]
            h, w = dims.height, dims.width
            all_xyxy = []
            all_confidence = []
            all_class_ids = []
            all_masks = []

            for prompt_idx, (prompt_text, aux) in enumerate(per_image_results):
                if aux is None:
                    continue
                boxes = _extract_boxes_from_aux(aux, w, h)
                masks = _extract_masks_from_aux(aux, h, w)

                for i, box in enumerate(boxes):
                    all_xyxy.append(box["xyxy"])
                    all_confidence.append(box["confidence"])
                    all_class_ids.append(prompt_idx)
                    if i < len(masks):
                        all_masks.append(masks[i])
                    else:
                        # Keep masks index-aligned with boxes.
                        all_masks.append(np.zeros((h, w), dtype=np.uint8))

            if all_xyxy:
                mask_tensor = torch.from_numpy(np.stack(all_masks)).bool()
                results.append(
                    InstanceDetections(
                        xyxy=torch.tensor(all_xyxy, dtype=torch.float32),
                        confidence=torch.tensor(all_confidence, dtype=torch.float32),
                        class_id=torch.tensor(all_class_ids, dtype=torch.int64),
                        mask=mask_tensor,
                    )
                )
            else:
                results.append(
                    InstanceDetections(
                        xyxy=torch.zeros((0, 4), dtype=torch.float32),
                        confidence=torch.zeros((0,), dtype=torch.float32),
                        class_id=torch.zeros((0,), dtype=torch.int64),
                        mask=torch.zeros((0, h, w), dtype=torch.bool),
                    )
                )
        return results
# ── Post-processing helpers ───────────────────────────────────────────


def pair_bbox_entries(raw: List[Dict]) -> List[Dict]:
    """Pair [{x,y}, {h,w}, ...] into [{x,y,h,w}, ...].

    Coordinate and size predictions are normalised to [0, 1]. Non-dict
    entries are skipped; a trailing incomplete pair is dropped.
    """
    bboxes: List[Dict] = []
    current: Dict = {}
    for entry in raw:
        if not isinstance(entry, dict):
            continue
        current.update(entry)
        if all(k in current for k in ("x", "y", "h", "w")):
            bboxes.append(dict(current))
            current = {}
    return bboxes


def normalized_bbox_to_xyxy(
    bbox: Dict[str, float], image_width: int, image_height: int
) -> List[float]:
    """Convert a normalized {x, y, h, w} center-format bbox to [x1, y1, x2, y2] pixel coords.

    Coordinates are clamped to the image bounds.
    """
    cx = bbox["x"] * image_width
    cy = bbox["y"] * image_height
    bw = bbox["w"] * image_width
    bh = bbox["h"] * image_height
    x1 = max(0.0, cx - bw / 2.0)
    y1 = max(0.0, cy - bh / 2.0)
    x2 = min(float(image_width), cx + bw / 2.0)
    y2 = min(float(image_height), cy + bh / 2.0)
    return [x1, y1, x2, y2]


def _extract_boxes_from_aux(
    aux: Any, image_width: int, image_height: int
) -> List[Dict]:
    """Extract bounding boxes from an AuxOutput object.

    Returns a list of dicts with "xyxy" (pixel coords) and "confidence".
    """
    from falcon_perception.aux_output import AuxOutput

    if not isinstance(aux, AuxOutput):
        return []

    raw_bboxes = aux.bboxes_raw
    if not raw_bboxes:
        # Fall back to materializing boxes when the raw stream is empty.
        raw_bboxes = aux.materialize_bboxes()

    paired = pair_bbox_entries(raw_bboxes)
    results = []
    for bbox in paired:
        xyxy = normalized_bbox_to_xyxy(bbox, image_width, image_height)
        confidence = _compute_bbox_confidence(bbox)
        results.append({"xyxy": xyxy, "confidence": confidence})
    return results


def _compute_bbox_confidence(bbox: Dict[str, float]) -> float:
    """Return a constant confidence of 1.0 for a generated bbox.

    Falcon Perception doesn't produce explicit per-instance confidence, so
    every emitted instance is scored 1.0: the model only emits instances it
    considers present. Prompt-level presence/absence is handled by checking
    whether any instances were generated at all.
    """
    return 1.0


def _extract_masks_from_aux(
    aux: Any, image_height: int, image_width: int
) -> List[np.ndarray]:
    """Extract binary masks from AuxOutput RLE data.

    Each decoded mask is resized (nearest-neighbour) to the original image
    size when needed; undecodable entries yield an all-zero mask so the
    output stays index-aligned with the RLE list.
    """
    from falcon_perception.aux_output import AuxOutput
    from pycocotools import mask as mask_utils

    if not isinstance(aux, AuxOutput):
        return []

    masks = []
    for rle in aux.masks_rle:
        try:
            rle_for_decode = rle
            # pycocotools expects bytes counts; JSON round-trips give str.
            if isinstance(rle.get("counts"), str):
                rle_for_decode = {**rle, "counts": rle["counts"].encode("utf-8")}
            binary = mask_utils.decode(rle_for_decode).astype(np.uint8)
            if (binary.shape[0], binary.shape[1]) != (image_height, image_width):
                from PIL import Image

                mask_img = Image.fromarray(binary * 255)
                mask_img = mask_img.resize((image_width, image_height), Image.NEAREST)
                binary = (np.array(mask_img) > 127).astype(np.uint8)
            masks.append(binary)
        except Exception:
            # Best-effort: a corrupt RLE must not take down the whole batch.
            masks.append(np.zeros((image_height, image_width), dtype=np.uint8))
    return masks


# ── Image conversion helpers ──────────────────────────────────────────


def _normalize_uint8(arr: np.ndarray) -> np.ndarray:
    """Coerce an image array to uint8, rescaling [0, 1]-range floats to [0, 255]."""
    if arr.dtype != np.uint8:
        if arr.size and arr.max() <= 1.0:
            arr = (arr * 255).astype(np.uint8)
        else:
            arr = arr.astype(np.uint8)
    return arr


def _to_pil_images(
    images: Union[torch.Tensor, List[torch.Tensor], np.ndarray, List[np.ndarray]],
    input_color_format: Optional[ColorFormat] = None,
) -> List[Any]:
    """Convert various image input formats to a list of PIL Images (RGB).

    Accepts a single HWC array / CHW tensor, a batched 4-D array/tensor, or
    a list mixing numpy arrays, tensors and PIL images. Numpy inputs default
    to BGR channel order, tensors to RGB. Float inputs are normalised to
    uint8 and grayscale inputs are promoted to RGB.

    Raises:
        ModelInputError: on unsupported container types or dimensionalities.
    """
    from PIL import Image

    if isinstance(images, np.ndarray):
        if images.ndim == 3:
            images = [images]
        elif images.ndim == 4:
            images = [images[i] for i in range(images.shape[0])]
        else:
            raise ModelInputError(
                message=f"Unexpected numpy array dimensions: {images.ndim}",
                help_url="https://inference-models.roboflow.com/errors/input-validation/#modelinputerror",
            )
    elif isinstance(images, torch.Tensor):
        if images.ndim == 3:
            images = [images]
        elif images.ndim == 4:
            images = [images[i] for i in range(images.shape[0])]
        else:
            raise ModelInputError(
                message=f"Unexpected tensor dimensions: {images.ndim}",
                help_url="https://inference-models.roboflow.com/errors/input-validation/#modelinputerror",
            )
    elif not isinstance(images, list):
        raise ModelInputError(
            message=f"Unsupported image input type: {type(images)}",
            help_url="https://inference-models.roboflow.com/errors/input-validation/#modelinputerror",
        )

    pil_images = []
    for img in images:
        if isinstance(img, Image.Image):
            pil_images.append(img.convert("RGB"))
        elif isinstance(img, np.ndarray):
            color_fmt = input_color_format or "bgr"
            # Normalise dtype first (fix: float arrays previously reached
            # Image.fromarray unconverted, unlike the tensor branch).
            arr = _normalize_uint8(img)
            # Only a 3-D HWC array has a channel order to undo; 2-D
            # grayscale previously crashed on this slice.
            if arr.ndim == 3 and color_fmt != "rgb":
                arr = np.ascontiguousarray(arr[:, :, ::-1])
            # convert("RGB") is a no-op for RGB input and promotes grayscale.
            pil_images.append(Image.fromarray(arr).convert("RGB"))
        elif isinstance(img, torch.Tensor):
            color_fmt = input_color_format or "rgb"
            arr = img.cpu().numpy()
            if arr.ndim == 3 and arr.shape[0] in (1, 3):
                arr = np.transpose(arr, (1, 2, 0))
            if arr.ndim == 3 and arr.shape[2] == 1:
                # Fix: PIL cannot build an image from (H, W, 1); squeeze.
                arr = arr[:, :, 0]
            arr = _normalize_uint8(arr)
            if arr.ndim == 3 and color_fmt != "rgb":
                arr = np.ascontiguousarray(arr[:, :, ::-1])
            pil_images.append(Image.fromarray(arr).convert("RGB"))
        else:
            raise ModelInputError(
                message=f"Unsupported image element type: {type(img)}",
                help_url="https://inference-models.roboflow.com/errors/input-validation/#modelinputerror",
            )
    return pil_images
"""Unit tests for Falcon Perception post-processing helpers.

These tests verify the pure-function output conversion logic (bbox pairing,
coordinate conversion, mask decoding) without requiring the actual model or
GPU.
"""

import numpy as np
import pytest
import torch

from inference_models.models.falcon_perception.falcon_perception_torch import (
    normalized_bbox_to_xyxy,
    pair_bbox_entries,
)

# ── pair_bbox_entries ─────────────────────────────────────────────────


class TestPairBboxEntries:
    """Pairing of streamed {x,y}/{h,w} fragments into complete bboxes."""

    def test_empty_input(self):
        assert pair_bbox_entries([]) == []

    def test_single_complete_pair(self):
        raw = [{"x": 0.5, "y": 0.5}, {"h": 0.1, "w": 0.2}]
        result = pair_bbox_entries(raw)
        assert len(result) == 1
        assert result[0] == {"x": 0.5, "y": 0.5, "h": 0.1, "w": 0.2}

    def test_multiple_pairs(self):
        raw = [
            {"x": 0.1, "y": 0.2},
            {"h": 0.3, "w": 0.4},
            {"x": 0.5, "y": 0.6},
            {"h": 0.7, "w": 0.8},
        ]
        result = pair_bbox_entries(raw)
        assert len(result) == 2
        assert result[0] == {"x": 0.1, "y": 0.2, "h": 0.3, "w": 0.4}
        assert result[1] == {"x": 0.5, "y": 0.6, "h": 0.7, "w": 0.8}

    def test_incomplete_pair_ignored(self):
        raw = [{"x": 0.5, "y": 0.5}]
        result = pair_bbox_entries(raw)
        assert len(result) == 0

    def test_non_dict_entries_skipped(self):
        raw = ["garbage", {"x": 0.5, "y": 0.5}, 42, {"h": 0.1, "w": 0.2}]
        result = pair_bbox_entries(raw)
        assert len(result) == 1
        assert result[0] == {"x": 0.5, "y": 0.5, "h": 0.1, "w": 0.2}

    def test_all_keys_in_single_dict(self):
        raw = [{"x": 0.5, "y": 0.5, "h": 0.1, "w": 0.2}]
        result = pair_bbox_entries(raw)
        assert len(result) == 1


# ── normalized_bbox_to_xyxy ───────────────────────────────────────────


class TestNormalizedBboxToXyxy:
    """Center-format normalized bbox → clamped pixel-space corner coords."""

    def test_center_of_image(self):
        bbox = {"x": 0.5, "y": 0.5, "w": 0.2, "h": 0.2}
        xyxy = normalized_bbox_to_xyxy(bbox, image_width=100, image_height=100)
        assert xyxy == pytest.approx([40.0, 40.0, 60.0, 60.0])

    def test_top_left_corner(self):
        bbox = {"x": 0.0, "y": 0.0, "w": 0.2, "h": 0.2}
        xyxy = normalized_bbox_to_xyxy(bbox, image_width=100, image_height=100)
        # cx=0, cy=0, w=20, h=20 → x1=-10 (clamped to 0), y1=-10 (clamped to 0)
        assert xyxy[0] == 0.0
        assert xyxy[1] == 0.0
        assert xyxy[2] == pytest.approx(10.0)
        assert xyxy[3] == pytest.approx(10.0)

    def test_bottom_right_corner(self):
        bbox = {"x": 1.0, "y": 1.0, "w": 0.2, "h": 0.2}
        xyxy = normalized_bbox_to_xyxy(bbox, image_width=100, image_height=100)
        # cx=100, cy=100 → x2=110 clamped to 100
        assert xyxy[2] == 100.0
        assert xyxy[3] == 100.0

    def test_non_square_image(self):
        bbox = {"x": 0.5, "y": 0.5, "w": 0.5, "h": 0.5}
        xyxy = normalized_bbox_to_xyxy(bbox, image_width=640, image_height=480)
        # cx=320, cy=240, bw=320, bh=240
        assert xyxy == pytest.approx([160.0, 120.0, 480.0, 360.0])

    def test_zero_size_bbox(self):
        bbox = {"x": 0.5, "y": 0.5, "w": 0.0, "h": 0.0}
        xyxy = normalized_bbox_to_xyxy(bbox, image_width=100, image_height=100)
        # Degenerate zero-area box at center
        assert xyxy == pytest.approx([50.0, 50.0, 50.0, 50.0])

    def test_full_image_bbox(self):
        bbox = {"x": 0.5, "y": 0.5, "w": 1.0, "h": 1.0}
        xyxy = normalized_bbox_to_xyxy(bbox, image_width=200, image_height=200)
        assert xyxy == pytest.approx([0.0, 0.0, 200.0, 200.0])


# ── _to_pil_images ────────────────────────────────────────────────────


class TestToPilImages:
    """Conversion of numpy/tensor inputs to RGB PIL images."""

    def test_numpy_bgr_to_pil(self):
        from inference_models.models.falcon_perception.falcon_perception_torch import (
            _to_pil_images,
        )

        img = np.zeros((100, 200, 3), dtype=np.uint8)
        img[:, :, 2] = 255  # Red channel in BGR (index 2 = R in BGR)
        pil_images = _to_pil_images(img, "bgr")
        assert len(pil_images) == 1
        assert pil_images[0].size == (200, 100)
        # After BGR→RGB conversion, the red channel should be 255
        arr = np.array(pil_images[0])
        assert arr[0, 0, 0] == 255  # R in RGB
        assert arr[0, 0, 2] == 0  # B in RGB

    def test_numpy_rgb_to_pil(self):
        from inference_models.models.falcon_perception.falcon_perception_torch import (
            _to_pil_images,
        )

        img = np.zeros((100, 200, 3), dtype=np.uint8)
        img[:, :, 0] = 128  # Red channel in RGB
        pil_images = _to_pil_images(img, "rgb")
        assert len(pil_images) == 1
        arr = np.array(pil_images[0])
        assert arr[0, 0, 0] == 128

    def test_list_of_numpy(self):
        from inference_models.models.falcon_perception.falcon_perception_torch import (
            _to_pil_images,
        )

        imgs = [
            np.zeros((50, 60, 3), dtype=np.uint8),
            np.zeros((70, 80, 3), dtype=np.uint8),
        ]
        pil_images = _to_pil_images(imgs, "rgb")
        assert len(pil_images) == 2
        assert pil_images[0].size == (60, 50)
        assert pil_images[1].size == (80, 70)

    def test_tensor_chw_to_pil(self):
        from inference_models.models.falcon_perception.falcon_perception_torch import (
            _to_pil_images,
        )

        tensor = torch.zeros(3, 100, 200, dtype=torch.uint8)
        pil_images = _to_pil_images(tensor, "rgb")
        assert len(pil_images) == 1
        assert pil_images[0].size == (200, 100)

    def test_batch_tensor_to_pil(self):
        from inference_models.models.falcon_perception.falcon_perception_torch import (
            _to_pil_images,
        )

        tensor = torch.zeros(2, 3, 100, 200, dtype=torch.uint8)
        pil_images = _to_pil_images(tensor, "rgb")
        assert len(pil_images) == 2


# ── Empty / absent results ────────────────────────────────────────────


class TestEmptyResults:
    """Edge cases: empty tensors and degenerate bbox streams."""

    def test_empty_detections_shape(self):
        det = torch.zeros((0, 4), dtype=torch.float32)
        assert det.shape == (0, 4)

    def test_pair_bbox_entries_with_empty_dicts(self):
        assert pair_bbox_entries([{}, {}]) == []

    def test_pair_bbox_entries_with_partial_keys(self):
        raw = [{"x": 0.5}, {"y": 0.5}, {"h": 0.1}]
        assert pair_bbox_entries(raw) == []
sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and sys_platform == 'darwin'", - "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
< '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' 
and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 
'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 
's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= 
'3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
== 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra 
== 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= 
'3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 
'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
== 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 
's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= 
'3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
== 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' 
and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine 
!= 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 
's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= 
'3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine 
!= 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' 
and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra 
!= 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra 
!= 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' 
and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' 
and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
< '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and 
sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", 
+ "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 
's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
< '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", ] conflicts = [[ { package = "inference-models", extra = "torch-cpu" }, @@ -25,6 +892,24 @@ conflicts = [[ { package = "inference-models", extra = "onnx-cu118" }, { package = "inference-models", extra = "onnx-cu12" }, { package = "inference-models", extra = "onnx-jp6-cu126" }, +], [ + { package = "inference-models", extra = "falcon-perception" }, + { package = "inference-models", extra = "torch-cpu" }, +], [ + { package = "inference-models", extra = "falcon-perception" }, + { package = "inference-models", extra = "torch-cu118" }, +], [ + { package = "inference-models", extra = "falcon-perception" }, + { package = "inference-models", extra = "torch-cu124" }, +], [ + { package = "inference-models", extra = "falcon-perception" }, + { package = "inference-models", extra = "torch-cu126" }, +], [ + { package = "inference-models", extra = "falcon-perception" }, + { package = "inference-models", extra = "torch-jp6-cu126" }, +], [ + { package = "inference-models", extra = "falcon-perception" }, + { package = "inference-models", extra = "onnx-jp6-cu126" }, ]] [manifest] @@ -64,35 +949,37 @@ requires-dist = ["nvidia-cuda-runtime-cu12"] [[package]] name = "absl-py" -version = "2.3.0" +version = "2.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/03/15/18693af986560a5c3cc0b84a8046b536ffb2cdb536e03cce897f2759e284/absl_py-2.3.0.tar.gz", hash = "sha256:d96fda5c884f1b22178852f30ffa85766d50b99e00775ea626c23304f582fc4f", size = 116400, upload-time = 
"2025-05-27T09:15:50.143Z" } +sdist = { url = "https://files.pythonhosted.org/packages/64/c7/8de93764ad66968d19329a7e0c147a2bb3c7054c554d4a119111b8f9440f/absl_py-2.4.0.tar.gz", hash = "sha256:8c6af82722b35cf71e0f4d1d47dcaebfff286e27110a99fc359349b247dfb5d4", size = 116543, upload-time = "2026-01-28T10:17:05.322Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/04/9d75e1d3bb4ab8ec67ff10919476ccdee06c098bcfcf3a352da5f985171d/absl_py-2.3.0-py3-none-any.whl", hash = "sha256:9824a48b654a306168f63e0d97714665f8490b8d89ec7bf2efc24bf67cf579b3", size = 135657, upload-time = "2025-05-27T09:15:48.742Z" }, + { url = "https://files.pythonhosted.org/packages/18/a6/907a406bb7d359e6a63f99c313846d9eec4f7e6f7437809e03aa00fa3074/absl_py-2.4.0-py3-none-any.whl", hash = "sha256:88476fd881ca8aab94ffa78b7b6c632a782ab3ba1cd19c9bd423abc4fb4cd28d", size = 135750, upload-time = "2026-01-28T10:17:04.19Z" }, ] [[package]] name = "accelerate" -version = "1.7.0" +version = "1.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub" }, - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or 
(python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or 
(extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' 
and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra 
== 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "packaging" }, { name = "psutil" }, { name = "pyyaml" }, { name = "safetensors" }, - { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cpu", source = 
{ registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or 
(extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra 
== 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/97/33/47bbd507e3a851d33d19ce7b2141c5ea3689bfae91ba168044d7db24b0e9/accelerate-1.7.0.tar.gz", hash = 
"sha256:e8a2a5503d6237b9eee73cc8d36cf543f9c2d8dd2c6713450b322f5e6d53a610", size = 376026, upload-time = "2025-05-15T10:00:52.117Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/14/787e5498cd062640f0f3d92ef4ae4063174f76f9afd29d13fc52a319daae/accelerate-1.13.0.tar.gz", hash = "sha256:d631b4e0f5b3de4aff2d7e9e6857d164810dfc3237d54d017f075122d057b236", size = 402835, upload-time = "2026-03-04T19:34:12.359Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/bb/be8146c196ad6e4dec78385d91e92591f8a433576c4e04c342a636fcd811/accelerate-1.7.0-py3-none-any.whl", hash = "sha256:cf57165cca28769c6cf2650812371c81b18e05743dfa3c748524b1bb4f2b272f", size = 362095, upload-time = "2025-05-15T10:00:49.914Z" }, + { url = "https://files.pythonhosted.org/packages/7e/46/02ac5e262d4af18054b3e922b2baedbb2a03289ee792162de60a865defc5/accelerate-1.13.0-py3-none-any.whl", hash = "sha256:cf1a3efb96c18f7b152eb0fa7490f3710b19c3f395699358f08decca2b8b62e0", size = 383744, upload-time = "2026-03-04T19:34:10.313Z" }, ] [[package]] @@ -104,6 +991,97 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6a/00/b08f23b7d7e1e14ce01419a467b583edbb93c6cdb8654e54a9cc579cd61f/addict-2.4.0-py3-none-any.whl", hash = "sha256:249bb56bbfd3cdc2a004ea0ff4c2b6ddc84d53bc2194761636eb314d5cfa5dfc", size = 3832, upload-time = "2020-11-21T16:21:29.588Z" }, ] +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = 
"sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.13.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "async-timeout", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130')" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/77/9a/152096d4808df8e4268befa55fba462f440f14beab85e8ad9bf990516918/aiohttp-3.13.5.tar.gz", hash = "sha256:9d98cc980ecc96be6eb4c1994ce35d28d8b1f5e5208a23b421187d1209dbb7d1", size = 7858271, upload-time = "2026-03-31T22:01:03.343Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/85/cebc47ee74d8b408749073a1a46c6fcba13d170dc8af7e61996c6c9394ac/aiohttp-3.13.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:02222e7e233295f40e011c1b00e3b0bd451f22cf853a0304c3595633ee47da4b", size = 750547, upload-time = "2026-03-31T21:56:30.024Z" }, + { url = "https://files.pythonhosted.org/packages/05/98/afd308e35b9d3d8c9ec54c0918f1d722c86dc17ddfec272fcdbcce5a3124/aiohttp-3.13.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bace460460ed20614fa6bc8cb09966c0b8517b8c58ad8046828c6078d25333b5", size = 503535, upload-time = "2026-03-31T21:56:31.935Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/4d/926c183e06b09d5270a309eb50fbde7b09782bfd305dec1e800f329834fb/aiohttp-3.13.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f546a4dc1e6a5edbb9fd1fd6ad18134550e096a5a43f4ad74acfbd834fc6670", size = 497830, upload-time = "2026-03-31T21:56:33.654Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d6/f47d1c690f115a5c2a5e8938cce4a232a5be9aac5c5fb2647efcbbbda333/aiohttp-3.13.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c86969d012e51b8e415a8c6ce96f7857d6a87d6207303ab02d5d11ef0cad2274", size = 1682474, upload-time = "2026-03-31T21:56:35.513Z" }, + { url = "https://files.pythonhosted.org/packages/01/44/056fd37b1bb52eac760303e5196acc74d9d546631b035704ae5927f7b4ac/aiohttp-3.13.5-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b6f6cd1560c5fa427e3b6074bb24d2c64e225afbb7165008903bd42e4e33e28a", size = 1655259, upload-time = "2026-03-31T21:56:37.843Z" }, + { url = "https://files.pythonhosted.org/packages/91/9f/78eb1a20c1c28ae02f6a3c0f4d7b0dcc66abce5290cadd53d78ce3084175/aiohttp-3.13.5-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:636bc362f0c5bbc7372bc3ae49737f9e3030dbce469f0f422c8f38079780363d", size = 1736204, upload-time = "2026-03-31T21:56:39.822Z" }, + { url = "https://files.pythonhosted.org/packages/de/6c/d20d7de23f0b52b8c1d9e2033b2db1ac4dacbb470bb74c56de0f5f86bb4f/aiohttp-3.13.5-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6a7cbeb06d1070f1d14895eeeed4dac5913b22d7b456f2eb969f11f4b3993796", size = 1826198, upload-time = "2026-03-31T21:56:41.378Z" }, + { url = "https://files.pythonhosted.org/packages/2f/86/a6f3ff1fd795f49545a7c74b2c92f62729135d73e7e4055bf74da5a26c82/aiohttp-3.13.5-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:bca9ef7517fd7874a1a08970ae88f497bf5c984610caa0bf40bd7e8450852b95", size = 1681329, upload-time = "2026-03-31T21:56:43.374Z" }, + { url = "https://files.pythonhosted.org/packages/fb/68/84cd3dab6b7b4f3e6fe9459a961acb142aaab846417f6e8905110d7027e5/aiohttp-3.13.5-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:019a67772e034a0e6b9b17c13d0a8fe56ad9fb150fc724b7f3ffd3724288d9e5", size = 1560023, upload-time = "2026-03-31T21:56:45.031Z" }, + { url = "https://files.pythonhosted.org/packages/41/2c/db61b64b0249e30f954a65ab4cb4970ced57544b1de2e3c98ee5dc24165f/aiohttp-3.13.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f34ecee82858e41dd217734f0c41a532bd066bcaab636ad830f03a30b2a96f2a", size = 1652372, upload-time = "2026-03-31T21:56:47.075Z" }, + { url = "https://files.pythonhosted.org/packages/25/6f/e96988a6c982d047810c772e28c43c64c300c943b0ed5c1c0c4ce1e1027c/aiohttp-3.13.5-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4eac02d9af4813ee289cd63a361576da36dba57f5a1ab36377bc2600db0cbb73", size = 1662031, upload-time = "2026-03-31T21:56:48.835Z" }, + { url = "https://files.pythonhosted.org/packages/b7/26/a56feace81f3d347b4052403a9d03754a0ab23f7940780dada0849a38c92/aiohttp-3.13.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4beac52e9fe46d6abf98b0176a88154b742e878fdf209d2248e99fcdf73cd297", size = 1708118, upload-time = "2026-03-31T21:56:50.833Z" }, + { url = "https://files.pythonhosted.org/packages/78/6e/b6173a8ff03d01d5e1a694bc06764b5dad1df2d4ed8f0ceec12bb3277936/aiohttp-3.13.5-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:c180f480207a9b2475f2b8d8bd7204e47aec952d084b2a2be58a782ffcf96074", size = 1548667, upload-time = "2026-03-31T21:56:52.81Z" }, + { url = "https://files.pythonhosted.org/packages/16/13/13296ffe2c132d888b3fe2c195c8b9c0c24c89c3fa5cc2c44464dc23b22e/aiohttp-3.13.5-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2837fb92951564d6339cedae4a7231692aa9f73cbc4fb2e04263b96844e03b4e", size = 
1724490, upload-time = "2026-03-31T21:56:54.541Z" }, + { url = "https://files.pythonhosted.org/packages/7a/b4/1f1c287f4a79782ef36e5a6e62954c85343bc30470d862d30bd5f26c9fa2/aiohttp-3.13.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d9010032a0b9710f58012a1e9c222528763d860ba2ee1422c03473eab47703e7", size = 1667109, upload-time = "2026-03-31T21:56:56.21Z" }, + { url = "https://files.pythonhosted.org/packages/ef/42/8461a2aaf60a8f4ea4549a4056be36b904b0eb03d97ca9a8a2604681a500/aiohttp-3.13.5-cp310-cp310-win32.whl", hash = "sha256:7c4b6668b2b2b9027f209ddf647f2a4407784b5d88b8be4efcc72036f365baf9", size = 439478, upload-time = "2026-03-31T21:56:58.292Z" }, + { url = "https://files.pythonhosted.org/packages/e5/71/06956304cb5ee439dfe8d86e1b2e70088bd88ed1ced1f42fb29e5d855f0e/aiohttp-3.13.5-cp310-cp310-win_amd64.whl", hash = "sha256:cd3db5927bf9167d5a6157ddb2f036f6b6b0ad001ac82355d43e97a4bde76d76", size = 462047, upload-time = "2026-03-31T21:57:00.257Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f5/a20c4ac64aeaef1679e25c9983573618ff765d7aa829fa2b84ae7573169e/aiohttp-3.13.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7ab7229b6f9b5c1ba4910d6c41a9eb11f543eadb3f384df1b4c293f4e73d44d6", size = 757513, upload-time = "2026-03-31T21:57:02.146Z" }, + { url = "https://files.pythonhosted.org/packages/75/0a/39fa6c6b179b53fcb3e4b3d2b6d6cad0180854eda17060c7218540102bef/aiohttp-3.13.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8f14c50708bb156b3a3ca7230b3d820199d56a48e3af76fa21c2d6087190fe3d", size = 506748, upload-time = "2026-03-31T21:57:04.275Z" }, + { url = "https://files.pythonhosted.org/packages/87/ec/e38ce072e724fd7add6243613f8d1810da084f54175353d25ccf9f9c7e5a/aiohttp-3.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7d2f8616f0ff60bd332022279011776c3ac0faa0f1b463f7bb12326fbc97a1c", size = 501673, upload-time = "2026-03-31T21:57:06.208Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/ba/3bc7525d7e2beaa11b309a70d48b0d3cfc3c2089ec6a7d0820d59c657053/aiohttp-3.13.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2567b72e1ffc3ab25510db43f355b29eeada56c0a622e58dcdb19530eb0a3cb", size = 1763757, upload-time = "2026-03-31T21:57:07.882Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ab/e87744cf18f1bd78263aba24924d4953b41086bd3a31d22452378e9028a0/aiohttp-3.13.5-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fb0540c854ac9c0c5ad495908fdfd3e332d553ec731698c0e29b1877ba0d2ec6", size = 1720152, upload-time = "2026-03-31T21:57:09.946Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f3/ed17a6f2d742af17b50bae2d152315ed1b164b07a5fd5cc1754d99e4dfa5/aiohttp-3.13.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c9883051c6972f58bfc4ebb2116345ee2aa151178e99c3f2b2bbe2af712abd13", size = 1818010, upload-time = "2026-03-31T21:57:12.157Z" }, + { url = "https://files.pythonhosted.org/packages/53/06/ecbc63dc937192e2a5cb46df4d3edb21deb8225535818802f210a6ea5816/aiohttp-3.13.5-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2294172ce08a82fb7c7273485895de1fa1186cc8294cfeb6aef4af42ad261174", size = 1907251, upload-time = "2026-03-31T21:57:14.023Z" }, + { url = "https://files.pythonhosted.org/packages/7e/a5/0521aa32c1ddf3aa1e71dcc466be0b7db2771907a13f18cddaa45967d97b/aiohttp-3.13.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3a807cabd5115fb55af198b98178997a5e0e57dead43eb74a93d9c07d6d4a7dc", size = 1759969, upload-time = "2026-03-31T21:57:16.146Z" }, + { url = "https://files.pythonhosted.org/packages/f6/78/a38f8c9105199dd3b9706745865a8a59d0041b6be0ca0cc4b2ccf1bab374/aiohttp-3.13.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:aa6d0d932e0f39c02b80744273cd5c388a2d9bc07760a03164f229c8e02662f6", size = 1616871, upload-time = "2026-03-31T21:57:17.856Z" }, + { url = "https://files.pythonhosted.org/packages/6f/41/27392a61ead8ab38072105c71aa44ff891e71653fe53d576a7067da2b4e8/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:60869c7ac4aaabe7110f26499f3e6e5696eae98144735b12a9c3d9eae2b51a49", size = 1739844, upload-time = "2026-03-31T21:57:19.679Z" }, + { url = "https://files.pythonhosted.org/packages/6e/55/5564e7ae26d94f3214250009a0b1c65a0c6af4bf88924ccb6fdab901de28/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:26d2f8546f1dfa75efa50c3488215a903c0168d253b75fba4210f57ab77a0fb8", size = 1731969, upload-time = "2026-03-31T21:57:22.006Z" }, + { url = "https://files.pythonhosted.org/packages/6d/c5/705a3929149865fc941bcbdd1047b238e4a72bcb215a9b16b9d7a2e8d992/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1162a1492032c82f14271e831c8f4b49f2b6078f4f5fc74de2c912fa225d51d", size = 1795193, upload-time = "2026-03-31T21:57:24.256Z" }, + { url = "https://files.pythonhosted.org/packages/a6/19/edabed62f718d02cff7231ca0db4ef1c72504235bc467f7b67adb1679f48/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:8b14eb3262fad0dc2f89c1a43b13727e709504972186ff6a99a3ecaa77102b6c", size = 1606477, upload-time = "2026-03-31T21:57:26.364Z" }, + { url = "https://files.pythonhosted.org/packages/de/fc/76f80ef008675637d88d0b21584596dc27410a990b0918cb1e5776545b5b/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:ca9ac61ac6db4eb6c2a0cd1d0f7e1357647b638ccc92f7e9d8d133e71ed3c6ac", size = 1813198, upload-time = "2026-03-31T21:57:28.316Z" }, + { url = "https://files.pythonhosted.org/packages/e5/67/5b3ac26b80adb20ea541c487f73730dc8fa107d632c998f25bbbab98fcda/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7996023b2ed59489ae4762256c8516df9820f751cf2c5da8ed2fb20ee50abab3", size = 1752321, upload-time = 
"2026-03-31T21:57:30.549Z" }, + { url = "https://files.pythonhosted.org/packages/88/06/e4a2e49255ea23fa4feeb5ab092d90240d927c15e47b5b5c48dff5a9ce29/aiohttp-3.13.5-cp311-cp311-win32.whl", hash = "sha256:77dfa48c9f8013271011e51c00f8ada19851f013cde2c48fca1ba5e0caf5bb06", size = 439069, upload-time = "2026-03-31T21:57:32.388Z" }, + { url = "https://files.pythonhosted.org/packages/c0/43/8c7163a596dab4f8be12c190cf467a1e07e4734cf90eebb39f7f5d53fc6a/aiohttp-3.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:d3a4834f221061624b8887090637db9ad4f61752001eae37d56c52fddade2dc8", size = 462859, upload-time = "2026-03-31T21:57:34.455Z" }, + { url = "https://files.pythonhosted.org/packages/be/6f/353954c29e7dcce7cf00280a02c75f30e133c00793c7a2ed3776d7b2f426/aiohttp-3.13.5-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:023ecba036ddd840b0b19bf195bfae970083fd7024ce1ac22e9bba90464620e9", size = 748876, upload-time = "2026-03-31T21:57:36.319Z" }, + { url = "https://files.pythonhosted.org/packages/f5/1b/428a7c64687b3b2e9cd293186695affc0e1e54a445d0361743b231f11066/aiohttp-3.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:15c933ad7920b7d9a20de151efcd05a6e38302cbf0e10c9b2acb9a42210a2416", size = 499557, upload-time = "2026-03-31T21:57:38.236Z" }, + { url = "https://files.pythonhosted.org/packages/29/47/7be41556bfbb6917069d6a6634bb7dd5e163ba445b783a90d40f5ac7e3a7/aiohttp-3.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab2899f9fa2f9f741896ebb6fa07c4c883bfa5c7f2ddd8cf2aafa86fa981b2d2", size = 500258, upload-time = "2026-03-31T21:57:39.923Z" }, + { url = "https://files.pythonhosted.org/packages/67/84/c9ecc5828cb0b3695856c07c0a6817a99d51e2473400f705275a2b3d9239/aiohttp-3.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60eaa2d440cd4707696b52e40ed3e2b0f73f65be07fd0ef23b6b539c9c0b0b4", size = 1749199, upload-time = "2026-03-31T21:57:41.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/d3/3c6d610e66b495657622edb6ae7c7fd31b2e9086b4ec50b47897ad6042a9/aiohttp-3.13.5-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:55b3bdd3292283295774ab585160c4004f4f2f203946997f49aac032c84649e9", size = 1721013, upload-time = "2026-03-31T21:57:43.904Z" }, + { url = "https://files.pythonhosted.org/packages/49/a0/24409c12217456df0bae7babe3b014e460b0b38a8e60753d6cb339f6556d/aiohttp-3.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2b2355dc094e5f7d45a7bb262fe7207aa0460b37a0d87027dcf21b5d890e7d5", size = 1781501, upload-time = "2026-03-31T21:57:46.285Z" }, + { url = "https://files.pythonhosted.org/packages/98/9d/b65ec649adc5bccc008b0957a9a9c691070aeac4e41cea18559fef49958b/aiohttp-3.13.5-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b38765950832f7d728297689ad78f5f2cf79ff82487131c4d26fe6ceecdc5f8e", size = 1878981, upload-time = "2026-03-31T21:57:48.734Z" }, + { url = "https://files.pythonhosted.org/packages/57/d8/8d44036d7eb7b6a8ec4c5494ea0c8c8b94fbc0ed3991c1a7adf230df03bf/aiohttp-3.13.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b18f31b80d5a33661e08c89e202edabf1986e9b49c42b4504371daeaa11b47c1", size = 1767934, upload-time = "2026-03-31T21:57:51.171Z" }, + { url = "https://files.pythonhosted.org/packages/31/04/d3f8211f273356f158e3464e9e45484d3fb8c4ce5eb2f6fe9405c3273983/aiohttp-3.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:33add2463dde55c4f2d9635c6ab33ce154e5ecf322bd26d09af95c5f81cfa286", size = 1566671, upload-time = "2026-03-31T21:57:53.326Z" }, + { url = "https://files.pythonhosted.org/packages/41/db/073e4ebe00b78e2dfcacff734291651729a62953b48933d765dc513bf798/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:327cc432fdf1356fb4fbc6fe833ad4e9f6aacb71a8acaa5f1855e4b25910e4a9", size = 1705219, upload-time = "2026-03-31T21:57:55.385Z" }, + { url = "https://files.pythonhosted.org/packages/48/45/7dfba71a2f9fd97b15c95c06819de7eb38113d2cdb6319669195a7d64270/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7c35b0bf0b48a70b4cb4fc5d7bed9b932532728e124874355de1a0af8ec4bc88", size = 1743049, upload-time = "2026-03-31T21:57:57.341Z" }, + { url = "https://files.pythonhosted.org/packages/18/71/901db0061e0f717d226386a7f471bb59b19566f2cae5f0d93874b017271f/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:df23d57718f24badef8656c49743e11a89fd6f5358fa8a7b96e728fda2abf7d3", size = 1749557, upload-time = "2026-03-31T21:57:59.626Z" }, + { url = "https://files.pythonhosted.org/packages/08/d5/41eebd16066e59cd43728fe74bce953d7402f2b4ddfdfef2c0e9f17ca274/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:02e048037a6501a5ec1f6fc9736135aec6eb8a004ce48838cb951c515f32c80b", size = 1558931, upload-time = "2026-03-31T21:58:01.972Z" }, + { url = "https://files.pythonhosted.org/packages/30/e6/4a799798bf05740e66c3a1161079bda7a3dd8e22ca392481d7a7f9af82a6/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31cebae8b26f8a615d2b546fee45d5ffb76852ae6450e2a03f42c9102260d6fe", size = 1774125, upload-time = "2026-03-31T21:58:04.007Z" }, + { url = "https://files.pythonhosted.org/packages/84/63/7749337c90f92bc2cb18f9560d67aa6258c7060d1397d21529b8004fcf6f/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:888e78eb5ca55a615d285c3c09a7a91b42e9dd6fc699b166ebd5dee87c9ccf14", size = 1732427, upload-time = "2026-03-31T21:58:06.337Z" }, + { url = "https://files.pythonhosted.org/packages/98/de/cf2f44ff98d307e72fb97d5f5bbae3bfcb442f0ea9790c0bf5c5c2331404/aiohttp-3.13.5-cp312-cp312-win32.whl", hash = "sha256:8bd3ec6376e68a41f9f95f5ed170e2fcf22d4eb27a1f8cb361d0508f6e0557f3", size = 433534, upload-time = 
"2026-03-31T21:58:08.712Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ca/eadf6f9c8fa5e31d40993e3db153fb5ed0b11008ad5d9de98a95045bed84/aiohttp-3.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:110e448e02c729bcebb18c60b9214a87ba33bac4a9fa5e9a5f139938b56c6cb1", size = 460446, upload-time = "2026-03-31T21:58:10.945Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] + [[package]] name = "annotated-doc" version = "0.0.4" @@ -130,25 +1108,25 @@ sdist = { url = "https://files.pythonhosted.org/packages/3e/38/7859ff46355f76f8d [[package]] name = "anyascii" -version = "0.3.2" +version = "0.3.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9f/52/93b9ea99063f7cf37fb67f5e3f49480686cbe7f228c48b9d713326223b6e/anyascii-0.3.2.tar.gz", hash = "sha256:9d5d32ef844fe225b8bc7cba7f950534fae4da27a9bf3a6bea2cb0ea46ce4730", size = 214052, upload-time = "2023-03-16T00:24:42.431Z" } +sdist = { url = "https://files.pythonhosted.org/packages/db/ba/edebda727008390936da4a9bf677c19cd63b32d51e864656d2cbd1028e25/anyascii-0.3.3.tar.gz", hash = "sha256:c94e9dd9d47b3d9494eca305fef9447d00b4bf1a32aff85aa746fa3ec7fb95c3", size = 264680, upload-time = "2025-06-29T03:33:30.427Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/4f/7b/a9a747e0632271d855da379532b05a62c58e979813814a57fa3b3afeb3a4/anyascii-0.3.2-py3-none-any.whl", hash = "sha256:3b3beef6fc43d9036d3b0529050b0c48bfad8bc960e9e562d7223cfb94fe45d4", size = 289923, upload-time = "2023-03-16T00:24:39.649Z" }, + { url = "https://files.pythonhosted.org/packages/c2/76/783b75a21ce3563b8709050de030ae253853b147bd52e141edc1025aa268/anyascii-0.3.3-py3-none-any.whl", hash = "sha256:f5ab5e53c8781a36b5a40e1296a0eeda2f48c649ef10c3921c1381b1d00dee7a", size = 345090, upload-time = "2025-06-29T03:33:28.356Z" }, ] [[package]] name = "anyio" -version = "4.12.1" +version = "4.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "exceptiongroup", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "idna" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } +sdist = { url = "https://files.pythonhosted.org/packages/19/14/2c5dd9f512b66549ae92767a9c7b330ae88e1932ca57876909410251fe13/anyio-4.13.0.tar.gz", hash = "sha256:334b70e641fd2221c1505b3890c69882fe4a2df910cba14d97019b90b24439dc", size = 231622, upload-time = "2026-03-24T12:59:09.671Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, + { url = "https://files.pythonhosted.org/packages/da/42/e921fccf5015463e32a3cf6ee7f980a6ed0f395ceeaa45060b61d86486c2/anyio-4.13.0-py3-none-any.whl", hash = "sha256:08b310f9e24a9594186fd75b4f73f4a4152069e3853f1ed8bfbf58369f4ad708", size = 114353, upload-time = "2026-03-24T12:59:08.246Z" }, ] [[package]] @@ -169,22 +1147,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d2/39/e7eaf1799466a4aef85b6a4fe7bd175ad2b1c6345066aa33f1f58d4b18d0/asttokens-3.0.1-py3-none-any.whl", hash = "sha256:15a3ebc0f43c2d0a50eeafea25e19046c68398e487b9f1f5b517f7c0f40f976a", size = 27047, upload-time = "2025-11-15T16:43:16.109Z" }, ] +[[package]] +name = "async-timeout" +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, +] + [[package]] name = "attrs" -version = "25.3.0" +version = "26.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = 
"2025-03-13T11:10:22.779Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/8e/82a0fe20a541c03148528be8cac2408564a6c9a0cc7e9171802bc1d26985/attrs-26.1.0.tar.gz", hash = "sha256:d03ceb89cb322a8fd706d4fb91940737b6642aa36998fe130a9bc96c985eff32", size = 952055, upload-time = "2026-03-19T14:22:25.026Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, + { url = "https://files.pythonhosted.org/packages/64/b4/17d4b0b2a2dc85a6df63d1157e028ed19f90d4cd97c36717afef2bc2f395/attrs-26.1.0-py3-none-any.whl", hash = "sha256:c647aa4a12dfbad9333ca4e71fe62ddc36f4e63b2d260a37a8b83d2f043ac309", size = 67548, upload-time = "2026-03-19T14:22:23.645Z" }, ] [[package]] name = "babel" -version = "2.17.0" +version = "2.18.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/b2/51899539b6ceeeb420d40ed3cd4b7a40519404f9baf3d4ac99dc413a834b/babel-2.18.0.tar.gz", hash = "sha256:b80b99a14bd085fcacfa15c9165f651fbb3406e66cc603abf11c5750937c992d", size = 9959554, upload-time = "2026-02-01T12:30:56.078Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/f5/21d2de20e8b8b0408f0681956ca2c69f1320a3848ac50e6e7f39c6159675/babel-2.18.0-py3-none-any.whl", hash = "sha256:e2b422b277c2b9a9630c1d7903c2a00d0830c409c59ac8cae9081c92f1aeba35", size = 10196845, upload-time = "2026-02-01T12:30:53.445Z" }, ] [[package]] @@ -198,14 +1185,14 @@ wheels = [ [[package]] name = "backrefs" -version = "6.1" +version = "6.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/86/e3/bb3a439d5cb255c4774724810ad8073830fac9c9dee123555820c1bcc806/backrefs-6.1.tar.gz", hash = "sha256:3bba1749aafe1db9b915f00e0dd166cba613b6f788ffd63060ac3485dc9be231", size = 7011962, upload-time = "2025-11-15T14:52:08.323Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4e/a6/e325ec73b638d3ede4421b5445d4a0b8b219481826cc079d510100af356c/backrefs-6.2.tar.gz", hash = "sha256:f44ff4d48808b243b6c0cdc6231e22195c32f77046018141556c66f8bab72a49", size = 7012303, upload-time = "2026-02-16T19:10:15.828Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/ee/c216d52f58ea75b5e1841022bbae24438b19834a29b163cb32aa3a2a7c6e/backrefs-6.1-py310-none-any.whl", hash = "sha256:2a2ccb96302337ce61ee4717ceacfbf26ba4efb1d55af86564b8bbaeda39cac1", size = 381059, upload-time = "2025-11-15T14:51:59.758Z" }, - { url = "https://files.pythonhosted.org/packages/e6/9a/8da246d988ded941da96c7ed945d63e94a445637eaad985a0ed88787cb89/backrefs-6.1-py311-none-any.whl", hash = "sha256:e82bba3875ee4430f4de4b6db19429a27275d95a5f3773c57e9e18abc23fd2b7", size = 392854, upload-time = "2025-11-15T14:52:01.194Z" }, - { url = "https://files.pythonhosted.org/packages/37/c9/fd117a6f9300c62bbc33bc337fd2b3c6bfe28b6e9701de336b52d7a797ad/backrefs-6.1-py312-none-any.whl", hash = "sha256:c64698c8d2269343d88947c0735cb4b78745bd3ba590e10313fbf3f78c34da5a", size = 398770, upload-time = "2025-11-15T14:52:02.584Z" }, - { url = 
"https://files.pythonhosted.org/packages/02/e3/a4fa1946722c4c7b063cc25043a12d9ce9b4323777f89643be74cef2993c/backrefs-6.1-py39-none-any.whl", hash = "sha256:a9e99b8a4867852cad177a6430e31b0f6e495d65f8c6c134b68c14c3c95bf4b0", size = 381058, upload-time = "2025-11-15T14:52:06.698Z" }, + { url = "https://files.pythonhosted.org/packages/1b/39/3765df263e08a4df37f4f43cb5aa3c6c17a4bdd42ecfe841e04c26037171/backrefs-6.2-py310-none-any.whl", hash = "sha256:0fdc7b012420b6b144410342caeb8adc54c6866cf12064abc9bb211302e496f8", size = 381075, upload-time = "2026-02-16T19:10:04.322Z" }, + { url = "https://files.pythonhosted.org/packages/0f/f0/35240571e1b67ffb19dafb29ab34150b6f59f93f717b041082cdb1bfceb1/backrefs-6.2-py311-none-any.whl", hash = "sha256:08aa7fae530c6b2361d7bdcbda1a7c454e330cc9dbcd03f5c23205e430e5c3be", size = 392874, upload-time = "2026-02-16T19:10:06.314Z" }, + { url = "https://files.pythonhosted.org/packages/e3/63/77e8c9745b4d227cce9f5e0a6f68041278c5f9b18588b35905f5f19c1beb/backrefs-6.2-py312-none-any.whl", hash = "sha256:c3f4b9cb2af8cda0d87ab4f57800b57b95428488477be164dd2b47be54db0c90", size = 398787, upload-time = "2026-02-16T19:10:08.274Z" }, + { url = "https://files.pythonhosted.org/packages/21/f8/d02f650c47d05034dcd6f9c8cf94f39598b7a89c00ecda0ecb2911bc27e9/backrefs-6.2-py39-none-any.whl", hash = "sha256:664e33cd88c6840b7625b826ecf2555f32d491800900f5a541f772c485f7cda7", size = 381077, upload-time = "2026-02-16T19:10:13.74Z" }, ] [[package]] @@ -226,13 +1213,16 @@ name = "bitsandbytes" version = "0.47.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy", marker = "sys_platform != 'darwin' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or 
(extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' 
and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') 
or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra 
== 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra 
== 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/aa/eb/477d6b5602f469c7305fd43eec71d890c39909f615c1d7138f6e7d226eff/bitsandbytes-0.47.0-py3-none-manylinux_2_24_aarch64.whl", hash = "sha256:2f805b76891a596025e9e13318b675d08481b9ee650d65e5d2f9d844084c6521", size = 30004641, upload-time = "2025-08-11T18:51:20.524Z" }, @@ -271,7 +1261,7 @@ wheels = [ [[package]] name = "cairosvg" -version = "2.8.2" +version = "2.9.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cairocffi" }, @@ -280,124 +1270,135 @@ dependencies = [ { name = "pillow" }, { name = "tinycss2" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ab/b9/5106168bd43d7cd8b7cc2a2ee465b385f14b63f4c092bb89eee2d48c8e67/cairosvg-2.8.2.tar.gz", hash = 
"sha256:07cbf4e86317b27a92318a4cac2a4bb37a5e9c1b8a27355d06874b22f85bef9f", size = 8398590, upload-time = "2025-05-15T06:56:32.653Z" } +sdist = { url = "https://files.pythonhosted.org/packages/38/07/e8412a13019b3f737972dea23a2c61ca42becafc16c9338f4ca7a0caa993/cairosvg-2.9.0.tar.gz", hash = "sha256:1debb00cd2da11350d8b6f5ceb739f1b539196d71d5cf5eb7363dbd1bfbc8dc5", size = 40877, upload-time = "2026-03-13T15:42:00.564Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/67/48/816bd4aaae93dbf9e408c58598bc32f4a8c65f4b86ab560864cb3ee60adb/cairosvg-2.8.2-py3-none-any.whl", hash = "sha256:eab46dad4674f33267a671dce39b64be245911c901c70d65d2b7b0821e852bf5", size = 45773, upload-time = "2025-05-15T06:56:28.552Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e0/5011747466414c12cac8a8df77aa235068669a6a5a5df301a96209db6054/cairosvg-2.9.0-py3-none-any.whl", hash = "sha256:4b82d07d145377dffdfc19d9791bd5fb65539bb4da0adecf0bdbd9cd4ffd7c68", size = 45962, upload-time = "2026-03-14T13:56:33.512Z" }, ] [[package]] name = "certifi" -version = "2025.6.15" +version = "2026.2.25" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/73/f7/f14b46d4bcd21092d7d3ccef689615220d8a08fb25e564b65d20738e672e/certifi-2025.6.15.tar.gz", hash = "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b", size = 158753, upload-time = "2025-06-15T02:45:51.329Z" } +sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/84/ae/320161bd181fc06471eed047ecce67b693fd7515b16d495d8932db763426/certifi-2025.6.15-py3-none-any.whl", hash = "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057", size = 157650, 
upload-time = "2025-06-15T02:45:49.977Z" }, + { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, ] [[package]] name = "cffi" -version = "1.17.1" +version = "2.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pycparser" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191, upload-time = "2024-09-04T20:43:30.027Z" }, - { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592, upload-time = "2024-09-04T20:43:32.108Z" }, - { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024, upload-time = "2024-09-04T20:43:34.186Z" }, - { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188, upload-time = "2024-09-04T20:43:36.286Z" }, - { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571, upload-time = "2024-09-04T20:43:38.586Z" }, - { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687, upload-time = "2024-09-04T20:43:40.084Z" }, - { url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211, upload-time = "2024-09-04T20:43:41.526Z" }, - { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325, upload-time = "2024-09-04T20:43:43.117Z" }, - { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784, upload-time = "2024-09-04T20:43:45.256Z" }, - { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", 
size = 461564, upload-time = "2024-09-04T20:43:46.779Z" }, - { url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804, upload-time = "2024-09-04T20:43:48.186Z" }, - { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299, upload-time = "2024-09-04T20:43:49.812Z" }, - { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264, upload-time = "2024-09-04T20:43:51.124Z" }, - { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651, upload-time = "2024-09-04T20:43:52.872Z" }, - { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259, upload-time = "2024-09-04T20:43:56.123Z" }, - { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200, upload-time = "2024-09-04T20:43:57.891Z" }, - { url = 
"https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235, upload-time = "2024-09-04T20:44:00.18Z" }, - { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721, upload-time = "2024-09-04T20:44:01.585Z" }, - { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242, upload-time = "2024-09-04T20:44:03.467Z" }, - { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999, upload-time = "2024-09-04T20:44:05.023Z" }, - { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242, upload-time = "2024-09-04T20:44:06.444Z" }, - { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604, upload-time = "2024-09-04T20:44:08.206Z" }, - { url = 
"https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727, upload-time = "2024-09-04T20:44:09.481Z" }, - { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400, upload-time = "2024-09-04T20:44:10.873Z" }, - { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" }, - { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" }, - { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" }, - { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" }, - { url = 
"https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" }, - { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256, upload-time = "2024-09-04T20:44:20.248Z" }, - { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424, upload-time = "2024-09-04T20:44:21.673Z" }, - { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568, upload-time = "2024-09-04T20:44:23.245Z" }, - { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" }, - { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" }, - { url = 
"https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" }, + { name = "pycparser", marker = "implementation_name != 'PyPy' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/d7/516d984057745a6cd96575eea814fe1edd6646ee6efd552fb7b0921dec83/cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44", size = 184283, upload-time = "2025-09-08T23:22:08.01Z" }, + { url = "https://files.pythonhosted.org/packages/9e/84/ad6a0b408daa859246f57c03efd28e5dd1b33c21737c2db84cae8c237aa5/cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49", size = 180504, upload-time = "2025-09-08T23:22:10.637Z" }, + { url = "https://files.pythonhosted.org/packages/50/bd/b1a6362b80628111e6653c961f987faa55262b4002fcec42308cad1db680/cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c", size = 208811, upload-time = "2025-09-08T23:22:12.267Z" }, + { url = "https://files.pythonhosted.org/packages/4f/27/6933a8b2562d7bd1fb595074cf99cc81fc3789f6a6c05cdabb46284a3188/cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb", size = 216402, upload-time = "2025-09-08T23:22:13.455Z" }, + { url = "https://files.pythonhosted.org/packages/05/eb/b86f2a2645b62adcfff53b0dd97e8dfafb5c8aa864bd0d9a2c2049a0d551/cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0", size = 203217, upload-time = "2025-09-08T23:22:14.596Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/e0/6cbe77a53acf5acc7c08cc186c9928864bd7c005f9efd0d126884858a5fe/cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4", size = 203079, upload-time = "2025-09-08T23:22:15.769Z" }, + { url = "https://files.pythonhosted.org/packages/98/29/9b366e70e243eb3d14a5cb488dfd3a0b6b2f1fb001a203f653b93ccfac88/cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453", size = 216475, upload-time = "2025-09-08T23:22:17.427Z" }, + { url = "https://files.pythonhosted.org/packages/21/7a/13b24e70d2f90a322f2900c5d8e1f14fa7e2a6b3332b7309ba7b2ba51a5a/cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495", size = 218829, upload-time = "2025-09-08T23:22:19.069Z" }, + { url = "https://files.pythonhosted.org/packages/60/99/c9dc110974c59cc981b1f5b66e1d8af8af764e00f0293266824d9c4254bc/cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5", size = 211211, upload-time = "2025-09-08T23:22:20.588Z" }, + { url = "https://files.pythonhosted.org/packages/49/72/ff2d12dbf21aca1b32a40ed792ee6b40f6dc3a9cf1644bd7ef6e95e0ac5e/cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb", size = 218036, upload-time = "2025-09-08T23:22:22.143Z" }, + { url = "https://files.pythonhosted.org/packages/e2/cc/027d7fb82e58c48ea717149b03bcadcbdc293553edb283af792bd4bcbb3f/cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a", size = 172184, upload-time = "2025-09-08T23:22:23.328Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/fa/072dd15ae27fbb4e06b437eb6e944e75b068deb09e2a2826039e49ee2045/cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739", size = 182790, upload-time = "2025-09-08T23:22:24.752Z" }, + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, ] [[package]] name = "charset-normalizer" -version = "3.4.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", size = 201818, upload-time = "2025-05-02T08:31:46.725Z" }, - { url = "https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", size = 144649, upload-time = "2025-05-02T08:31:48.889Z" }, - { url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", size = 155045, upload-time = 
"2025-05-02T08:31:50.757Z" }, - { url = "https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", size = 147356, upload-time = "2025-05-02T08:31:52.634Z" }, - { url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", size = 149471, upload-time = "2025-05-02T08:31:56.207Z" }, - { url = "https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", size = 151317, upload-time = "2025-05-02T08:31:57.613Z" }, - { url = "https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", size = 146368, upload-time = "2025-05-02T08:31:59.468Z" }, - { url = "https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", size = 154491, upload-time = "2025-05-02T08:32:01.219Z" }, - { url = "https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", size = 157695, upload-time = "2025-05-02T08:32:03.045Z" }, - { url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", size = 154849, upload-time = "2025-05-02T08:32:04.651Z" }, - { url = "https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", size = 150091, upload-time = "2025-05-02T08:32:06.719Z" }, - { url = "https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", size = 98445, upload-time = "2025-05-02T08:32:08.66Z" }, - { url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", size = 105782, upload-time = "2025-05-02T08:32:10.46Z" }, - { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" }, - { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" }, - { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" }, - { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" }, - { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" }, - { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" }, - { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" }, - { url = 
"https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" }, - { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" }, - { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" }, - { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" }, - { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106, upload-time = "2025-05-02T08:32:30.281Z" }, - { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" }, - { url = 
"https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, - { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, - { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, - { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, - { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, - { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, - { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, - { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, - { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, - { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, - { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", 
size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, - { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, - { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, +version = "3.4.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/a1/67fe25fac3c7642725500a3f6cfe5821ad557c3abb11c9d20d12c7008d3e/charset_normalizer-3.4.7.tar.gz", hash = "sha256:ae89db9e5f98a11a4bf50407d4363e7b09b31e55bc117b4f7d80aab97ba009e5", size = 144271, upload-time = "2026-04-02T09:28:39.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/08/0f303cb0b529e456bb116f2d50565a482694fbb94340bf56d44677e7ed03/charset_normalizer-3.4.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cdd68a1fb318e290a2077696b7eb7a21a49163c455979c639bf5a5dcdc46617d", size = 315182, upload-time = "2026-04-02T09:25:40.673Z" }, + { url = "https://files.pythonhosted.org/packages/24/47/b192933e94b546f1b1fe4df9cc1f84fcdbf2359f8d1081d46dd029b50207/charset_normalizer-3.4.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e17b8d5d6a8c47c85e68ca8379def1303fd360c3e22093a807cd34a71cd082b8", size = 209329, upload-time = "2026-04-02T09:25:42.354Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b4/01fa81c5ca6141024d89a8fc15968002b71da7f825dd14113207113fabbd/charset_normalizer-3.4.7-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:511ef87c8aec0783e08ac18565a16d435372bc1ac25a91e6ac7f5ef2b0bff790", size = 231230, upload-time = "2026-04-02T09:25:44.281Z" }, + { url = "https://files.pythonhosted.org/packages/20/f7/7b991776844dfa058017e600e6e55ff01984a063290ca5622c0b63162f68/charset_normalizer-3.4.7-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:007d05ec7321d12a40227aae9e2bc6dca73f3cb21058999a1df9e193555a9dcc", size = 225890, upload-time = "2026-04-02T09:25:45.475Z" }, + { url = "https://files.pythonhosted.org/packages/20/e7/bed0024a0f4ab0c8a9c64d4445f39b30c99bd1acd228291959e3de664247/charset_normalizer-3.4.7-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cf29836da5119f3c8a8a70667b0ef5fdca3bb12f80fd06487cfa575b3909b393", size = 216930, upload-time = "2026-04-02T09:25:46.58Z" }, + { url = "https://files.pythonhosted.org/packages/e2/ab/b18f0ab31cdd7b3ddb8bb76c4a414aeb8160c9810fdf1bc62f269a539d87/charset_normalizer-3.4.7-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:12d8baf840cc7889b37c7c770f478adea7adce3dcb3944d02ec87508e2dcf153", size = 202109, upload-time = "2026-04-02T09:25:48.031Z" }, + { url = "https://files.pythonhosted.org/packages/82/e5/7e9440768a06dfb3075936490cb82dbf0ee20a133bf0dd8551fa096914ec/charset_normalizer-3.4.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d560742f3c0d62afaccf9f41fe485ed69bd7661a241f86a3ef0f0fb8b1a397af", size = 214684, upload-time = "2026-04-02T09:25:49.245Z" }, + { url = "https://files.pythonhosted.org/packages/71/94/8c61d8da9f062fdf457c80acfa25060ec22bf1d34bbeaca4350f13bcfd07/charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b14b2d9dac08e28bb8046a1a0434b1750eb221c8f5b87a68f4fa11a6f97b5e34", size = 212785, upload-time = "2026-04-02T09:25:50.671Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/cd/6e9889c648e72c0ab2e5967528bb83508f354d706637bc7097190c874e13/charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:bc17a677b21b3502a21f66a8cc64f5bfad4df8a0b8434d661666f8ce90ac3af1", size = 203055, upload-time = "2026-04-02T09:25:51.802Z" }, + { url = "https://files.pythonhosted.org/packages/92/2e/7a951d6a08aefb7eb8e1b54cdfb580b1365afdd9dd484dc4bee9e5d8f258/charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:750e02e074872a3fad7f233b47734166440af3cdea0add3e95163110816d6752", size = 232502, upload-time = "2026-04-02T09:25:53.388Z" }, + { url = "https://files.pythonhosted.org/packages/58/d5/abcf2d83bf8e0a1286df55cd0dc1d49af0da4282aa77e986df343e7de124/charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:4e5163c14bffd570ef2affbfdd77bba66383890797df43dc8b4cc7d6f500bf53", size = 214295, upload-time = "2026-04-02T09:25:54.765Z" }, + { url = "https://files.pythonhosted.org/packages/47/3a/7d4cd7ed54be99973a0dc176032cba5cb1f258082c31fa6df35cff46acfc/charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6ed74185b2db44f41ef35fd1617c5888e59792da9bbc9190d6c7300617182616", size = 227145, upload-time = "2026-04-02T09:25:55.904Z" }, + { url = "https://files.pythonhosted.org/packages/1d/98/3a45bf8247889cf28262ebd3d0872edff11565b2a1e3064ccb132db3fbb0/charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:94e1885b270625a9a828c9793b4d52a64445299baa1fea5a173bf1d3dd9a1a5a", size = 218884, upload-time = "2026-04-02T09:25:57.074Z" }, + { url = "https://files.pythonhosted.org/packages/ad/80/2e8b7f8915ed5c9ef13aa828d82738e33888c485b65ebf744d615040c7ea/charset_normalizer-3.4.7-cp310-cp310-win32.whl", hash = "sha256:6785f414ae0f3c733c437e0f3929197934f526d19dfaa75e18fdb4f94c6fb374", size = 148343, upload-time = "2026-04-02T09:25:58.199Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/1b/3b8c8c77184af465ee9ad88b5aea46ea6b2e1f7b9dc9502891e37af21e30/charset_normalizer-3.4.7-cp310-cp310-win_amd64.whl", hash = "sha256:6696b7688f54f5af4462118f0bfa7c1621eeb87154f77fa04b9295ce7a8f2943", size = 159174, upload-time = "2026-04-02T09:25:59.322Z" }, + { url = "https://files.pythonhosted.org/packages/be/c1/feb40dca40dbb21e0a908801782d9288c64fc8d8e562c2098e9994c8c21b/charset_normalizer-3.4.7-cp310-cp310-win_arm64.whl", hash = "sha256:66671f93accb62ed07da56613636f3641f1a12c13046ce91ffc923721f23c008", size = 147805, upload-time = "2026-04-02T09:26:00.756Z" }, + { url = "https://files.pythonhosted.org/packages/c2/d7/b5b7020a0565c2e9fa8c09f4b5fa6232feb326b8c20081ccded47ea368fd/charset_normalizer-3.4.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7641bb8895e77f921102f72833904dcd9901df5d6d72a2ab8f31d04b7e51e4e7", size = 309705, upload-time = "2026-04-02T09:26:02.191Z" }, + { url = "https://files.pythonhosted.org/packages/5a/53/58c29116c340e5456724ecd2fff4196d236b98f3da97b404bc5e51ac3493/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:202389074300232baeb53ae2569a60901f7efadd4245cf3a3bf0617d60b439d7", size = 206419, upload-time = "2026-04-02T09:26:03.583Z" }, + { url = "https://files.pythonhosted.org/packages/b2/02/e8146dc6591a37a00e5144c63f29fb7c97a734ea8a111190783c0e60ab63/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:30b8d1d8c52a48c2c5690e152c169b673487a2a58de1ec7393196753063fcd5e", size = 227901, upload-time = "2026-04-02T09:26:04.738Z" }, + { url = "https://files.pythonhosted.org/packages/fb/73/77486c4cd58f1267bf17db420e930c9afa1b3be3fe8c8b8ebbebc9624359/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:532bc9bf33a68613fd7d65e4b1c71a6a38d7d42604ecf239c77392e9b4e8998c", size = 222742, 
upload-time = "2026-04-02T09:26:06.36Z" }, + { url = "https://files.pythonhosted.org/packages/a1/fa/f74eb381a7d94ded44739e9d94de18dc5edc9c17fb8c11f0a6890696c0a9/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fe249cb4651fd12605b7288b24751d8bfd46d35f12a20b1ba33dea122e690df", size = 214061, upload-time = "2026-04-02T09:26:08.347Z" }, + { url = "https://files.pythonhosted.org/packages/dc/92/42bd3cefcf7687253fb86694b45f37b733c97f59af3724f356fa92b8c344/charset_normalizer-3.4.7-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:65bcd23054beab4d166035cabbc868a09c1a49d1efe458fe8e4361215df40265", size = 199239, upload-time = "2026-04-02T09:26:09.823Z" }, + { url = "https://files.pythonhosted.org/packages/4c/3d/069e7184e2aa3b3cddc700e3dd267413dc259854adc3380421c805c6a17d/charset_normalizer-3.4.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:08e721811161356f97b4059a9ba7bafb23ea5ee2255402c42881c214e173c6b4", size = 210173, upload-time = "2026-04-02T09:26:10.953Z" }, + { url = "https://files.pythonhosted.org/packages/62/51/9d56feb5f2e7074c46f93e0ebdbe61f0848ee246e2f0d89f8e20b89ebb8f/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e060d01aec0a910bdccb8be71faf34e7799ce36950f8294c8bf612cba65a2c9e", size = 209841, upload-time = "2026-04-02T09:26:12.142Z" }, + { url = "https://files.pythonhosted.org/packages/d2/59/893d8f99cc4c837dda1fe2f1139079703deb9f321aabcb032355de13b6c7/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:38c0109396c4cfc574d502df99742a45c72c08eff0a36158b6f04000043dbf38", size = 200304, upload-time = "2026-04-02T09:26:13.711Z" }, + { url = "https://files.pythonhosted.org/packages/7d/1d/ee6f3be3464247578d1ed5c46de545ccc3d3ff933695395c402c21fa6b77/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1c2a768fdd44ee4a9339a9b0b130049139b8ce3c01d2ce09f67f5a68048d477c", size 
= 229455, upload-time = "2026-04-02T09:26:14.941Z" }, + { url = "https://files.pythonhosted.org/packages/54/bb/8fb0a946296ea96a488928bdce8ef99023998c48e4713af533e9bb98ef07/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:1a87ca9d5df6fe460483d9a5bbf2b18f620cbed41b432e2bddb686228282d10b", size = 210036, upload-time = "2026-04-02T09:26:16.478Z" }, + { url = "https://files.pythonhosted.org/packages/9a/bc/015b2387f913749f82afd4fcba07846d05b6d784dd16123cb66860e0237d/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:d635aab80466bc95771bb78d5370e74d36d1fe31467b6b29b8b57b2a3cd7d22c", size = 224739, upload-time = "2026-04-02T09:26:17.751Z" }, + { url = "https://files.pythonhosted.org/packages/17/ab/63133691f56baae417493cba6b7c641571a2130eb7bceba6773367ab9ec5/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ae196f021b5e7c78e918242d217db021ed2a6ace2bc6ae94c0fc596221c7f58d", size = 216277, upload-time = "2026-04-02T09:26:18.981Z" }, + { url = "https://files.pythonhosted.org/packages/06/6d/3be70e827977f20db77c12a97e6a9f973631a45b8d186c084527e53e77a4/charset_normalizer-3.4.7-cp311-cp311-win32.whl", hash = "sha256:adb2597b428735679446b46c8badf467b4ca5f5056aae4d51a19f9570301b1ad", size = 147819, upload-time = "2026-04-02T09:26:20.295Z" }, + { url = "https://files.pythonhosted.org/packages/20/d9/5f67790f06b735d7c7637171bbfd89882ad67201891b7275e51116ed8207/charset_normalizer-3.4.7-cp311-cp311-win_amd64.whl", hash = "sha256:8e385e4267ab76874ae30db04c627faaaf0b509e1ccc11a95b3fc3e83f855c00", size = 159281, upload-time = "2026-04-02T09:26:21.74Z" }, + { url = "https://files.pythonhosted.org/packages/ca/83/6413f36c5a34afead88ce6f66684d943d91f233d76dd083798f9602b75ae/charset_normalizer-3.4.7-cp311-cp311-win_arm64.whl", hash = "sha256:d4a48e5b3c2a489fae013b7589308a40146ee081f6f509e047e0e096084ceca1", size = 147843, upload-time = "2026-04-02T09:26:22.901Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/eb/4fc8d0a7110eb5fc9cc161723a34a8a6c200ce3b4fbf681bc86feee22308/charset_normalizer-3.4.7-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:eca9705049ad3c7345d574e3510665cb2cf844c2f2dcfe675332677f081cbd46", size = 311328, upload-time = "2026-04-02T09:26:24.331Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e3/0fadc706008ac9d7b9b5be6dc767c05f9d3e5df51744ce4cc9605de7b9f4/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6178f72c5508bfc5fd446a5905e698c6212932f25bcdd4b47a757a50605a90e2", size = 208061, upload-time = "2026-04-02T09:26:25.568Z" }, + { url = "https://files.pythonhosted.org/packages/42/f0/3dd1045c47f4a4604df85ec18ad093912ae1344ac706993aff91d38773a2/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1421b502d83040e6d7fb2fb18dff63957f720da3d77b2fbd3187ceb63755d7b", size = 229031, upload-time = "2026-04-02T09:26:26.865Z" }, + { url = "https://files.pythonhosted.org/packages/dc/67/675a46eb016118a2fbde5a277a5d15f4f69d5f3f5f338e5ee2f8948fcf43/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:edac0f1ab77644605be2cbba52e6b7f630731fc42b34cb0f634be1a6eface56a", size = 225239, upload-time = "2026-04-02T09:26:28.044Z" }, + { url = "https://files.pythonhosted.org/packages/4b/f8/d0118a2f5f23b02cd166fa385c60f9b0d4f9194f574e2b31cef350ad7223/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5649fd1c7bade02f320a462fdefd0b4bd3ce036065836d4f42e0de958038e116", size = 216589, upload-time = "2026-04-02T09:26:29.239Z" }, + { url = "https://files.pythonhosted.org/packages/b1/f1/6d2b0b261b6c4ceef0fcb0d17a01cc5bc53586c2d4796fa04b5c540bc13d/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_armv7l.whl", hash = 
"sha256:203104ed3e428044fd943bc4bf45fa73c0730391f9621e37fe39ecf477b128cb", size = 202733, upload-time = "2026-04-02T09:26:30.5Z" }, + { url = "https://files.pythonhosted.org/packages/6f/c0/7b1f943f7e87cc3db9626ba17807d042c38645f0a1d4415c7a14afb5591f/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:298930cec56029e05497a76988377cbd7457ba864beeea92ad7e844fe74cd1f1", size = 212652, upload-time = "2026-04-02T09:26:31.709Z" }, + { url = "https://files.pythonhosted.org/packages/38/dd/5a9ab159fe45c6e72079398f277b7d2b523e7f716acc489726115a910097/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:708838739abf24b2ceb208d0e22403dd018faeef86ddac04319a62ae884c4f15", size = 211229, upload-time = "2026-04-02T09:26:33.282Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ff/531a1cad5ca855d1c1a8b69cb71abfd6d85c0291580146fda7c82857caa1/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:0f7eb884681e3938906ed0434f20c63046eacd0111c4ba96f27b76084cd679f5", size = 203552, upload-time = "2026-04-02T09:26:34.845Z" }, + { url = "https://files.pythonhosted.org/packages/c1/4c/a5fb52d528a8ca41f7598cb619409ece30a169fbdf9cdce592e53b46c3a6/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4dc1e73c36828f982bfe79fadf5919923f8a6f4df2860804db9a98c48824ce8d", size = 230806, upload-time = "2026-04-02T09:26:36.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/7a/071feed8124111a32b316b33ae4de83d36923039ef8cf48120266844285b/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:aed52fea0513bac0ccde438c188c8a471c4e0f457c2dd20cdbf6ea7a450046c7", size = 212316, upload-time = "2026-04-02T09:26:37.672Z" }, + { url = "https://files.pythonhosted.org/packages/fd/35/f7dba3994312d7ba508e041eaac39a36b120f32d4c8662b8814dab876431/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:fea24543955a6a729c45a73fe90e08c743f0b3334bbf3201e6c4bc1b0c7fa464", size = 227274, upload-time = "2026-04-02T09:26:38.93Z" }, + { url = "https://files.pythonhosted.org/packages/8a/2d/a572df5c9204ab7688ec1edc895a73ebded3b023bb07364710b05dd1c9be/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb6d88045545b26da47aa879dd4a89a71d1dce0f0e549b1abcb31dfe4a8eac49", size = 218468, upload-time = "2026-04-02T09:26:40.17Z" }, + { url = "https://files.pythonhosted.org/packages/86/eb/890922a8b03a568ca2f336c36585a4713c55d4d67bf0f0c78924be6315ca/charset_normalizer-3.4.7-cp312-cp312-win32.whl", hash = "sha256:2257141f39fe65a3fdf38aeccae4b953e5f3b3324f4ff0daf9f15b8518666a2c", size = 148460, upload-time = "2026-04-02T09:26:41.416Z" }, + { url = "https://files.pythonhosted.org/packages/35/d9/0e7dffa06c5ab081f75b1b786f0aefc88365825dfcd0ac544bdb7b2b6853/charset_normalizer-3.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:5ed6ab538499c8644b8a3e18debabcd7ce684f3fa91cf867521a7a0279cab2d6", size = 159330, upload-time = "2026-04-02T09:26:42.554Z" }, + { url = "https://files.pythonhosted.org/packages/9e/5d/481bcc2a7c88ea6b0878c299547843b2521ccbc40980cb406267088bc701/charset_normalizer-3.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:56be790f86bfb2c98fb742ce566dfb4816e5a83384616ab59c49e0604d49c51d", size = 147828, upload-time = "2026-04-02T09:26:44.075Z" }, + { url = "https://files.pythonhosted.org/packages/db/8f/61959034484a4a7c527811f4721e75d02d653a35afb0b6054474d8185d4c/charset_normalizer-3.4.7-py3-none-any.whl", hash = "sha256:3dce51d0f5e7951f8bb4900c257dad282f49190fdbebecd4ba99bcc41fef404d", size = 61958, upload-time = "2026-04-02T09:28:37.794Z" }, ] [[package]] name = "click" -version = "8.3.1" +version = "8.3.2" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "colorama", marker = "sys_platform == 'win32' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +sdist = { url = "https://files.pythonhosted.org/packages/57/75/31212c6bf2503fdf920d87fee5d7a86a2e3bcf444984126f13d8e4016804/click-8.3.2.tar.gz", hash = "sha256:14162b8b3b3550a7d479eafa77dfd3c38d9dc8951f6f69c78913a8f9a7540fd5", size = 302856, upload-time = "2026-04-03T19:14:45.118Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, + { url = "https://files.pythonhosted.org/packages/e4/20/71885d8b97d4f3dde17b1fdb92dbd4908b00541c5a3379787137285f602e/click-8.3.2-py3-none-any.whl", hash = "sha256:1924d2c27c5653561cd2cae4548d1406039cb79b858b747cfea24924bbc1616d", size = 108379, upload-time = "2026-04-03T19:14:43.505Z" }, ] [[package]] @@ -414,7 +1415,7 @@ name = "coloredlogs" version = "15.0.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "humanfriendly", marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 
'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "humanfriendly", marker = "extra == 'extra-16-inference-models-onnx-cpu' or extra == 'extra-16-inference-models-onnx-cu118' or extra == 'extra-16-inference-models-onnx-cu12' or extra == 'extra-16-inference-models-onnx-jp6-cu126' 
or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' 
and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/cc/c7/eed8f27100517e8c0e6b923d5f0845d0cb99763da6fdee00478f91db7325/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0", size = 278520, upload-time = "2021-06-11T10:22:45.202Z" } wheels = [ @@ -434,8 +1435,16 @@ wheels = [ name = "contourpy" version = "1.3.2" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", +] dependencies = [ - { name = 
"numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = 
"(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' 
and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/66/54/eb9bfc647b19f2009dd5c7f5ec51c4e6ca831725f1aea7a993034f483147/contourpy-1.3.2.tar.gz", hash = "sha256:b6945942715a034c671b7fc54f9588126b0b8bf23db2696e3ca8328f3ff0ab54", size = 13466130, upload-time = "2025-04-15T17:47:53.79Z" } wheels = [ @@ -477,17 +1486,744 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/87/68/7f46fb537958e87427d98a4074bcde4b67a70b04900cfc5ce29bc2f556c1/contourpy-1.3.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8c5acb8dddb0752bf252e01a3035b21443158910ac16a3b0d20e7fed7d534ce5", size = 221791, upload-time = "2025-04-15T17:45:24.794Z" }, ] +[[package]] +name = "contourpy" +version = "1.3.3" +source = { 
registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra 
!= 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' 
and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", 
+ "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra 
!= 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == 
'3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' 
and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine 
!= 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' 
and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra 
!= 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= 
'3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' 
and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra 
!= 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
>= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' 
and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and 
sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and 
extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra 
!= 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine 
!= 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 
'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra 
== 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= 
'3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", +] +dependencies = [ + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < 
'3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or 
(extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') 
or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') 
or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174, upload-time = "2025-07-26T12:03:12.549Z" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/91/2e/c4390a31919d8a78b90e8ecf87cd4b4c4f05a5b48d05ec17db8e5404c6f4/contourpy-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:709a48ef9a690e1343202916450bc48b9e51c049b089c7f79a267b46cffcdaa1", size = 288773, upload-time = "2025-07-26T12:01:02.277Z" }, + { url = "https://files.pythonhosted.org/packages/0d/44/c4b0b6095fef4dc9c420e041799591e3b63e9619e3044f7f4f6c21c0ab24/contourpy-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:23416f38bfd74d5d28ab8429cc4d63fa67d5068bd711a85edb1c3fb0c3e2f381", size = 270149, upload-time = "2025-07-26T12:01:04.072Z" }, + { url = "https://files.pythonhosted.org/packages/30/2e/dd4ced42fefac8470661d7cb7e264808425e6c5d56d175291e93890cce09/contourpy-1.3.3-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:929ddf8c4c7f348e4c0a5a3a714b5c8542ffaa8c22954862a46ca1813b667ee7", size = 329222, upload-time = "2025-07-26T12:01:05.688Z" }, + { url = "https://files.pythonhosted.org/packages/f2/74/cc6ec2548e3d276c71389ea4802a774b7aa3558223b7bade3f25787fafc2/contourpy-1.3.3-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9e999574eddae35f1312c2b4b717b7885d4edd6cb46700e04f7f02db454e67c1", size = 377234, upload-time = "2025-07-26T12:01:07.054Z" }, + { url = "https://files.pythonhosted.org/packages/03/b3/64ef723029f917410f75c09da54254c5f9ea90ef89b143ccadb09df14c15/contourpy-1.3.3-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf67e0e3f482cb69779dd3061b534eb35ac9b17f163d851e2a547d56dba0a3a", size = 380555, upload-time = "2025-07-26T12:01:08.801Z" }, + { url = "https://files.pythonhosted.org/packages/5f/4b/6157f24ca425b89fe2eb7e7be642375711ab671135be21e6faa100f7448c/contourpy-1.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51e79c1f7470158e838808d4a996fa9bac72c498e93d8ebe5119bc1e6becb0db", size = 355238, upload-time = "2025-07-26T12:01:10.319Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/56/f914f0dd678480708a04cfd2206e7c382533249bc5001eb9f58aa693e200/contourpy-1.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:598c3aaece21c503615fd59c92a3598b428b2f01bfb4b8ca9c4edeecc2438620", size = 1326218, upload-time = "2025-07-26T12:01:12.659Z" }, + { url = "https://files.pythonhosted.org/packages/fb/d7/4a972334a0c971acd5172389671113ae82aa7527073980c38d5868ff1161/contourpy-1.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:322ab1c99b008dad206d406bb61d014cf0174df491ae9d9d0fac6a6fda4f977f", size = 1392867, upload-time = "2025-07-26T12:01:15.533Z" }, + { url = "https://files.pythonhosted.org/packages/75/3e/f2cc6cd56dc8cff46b1a56232eabc6feea52720083ea71ab15523daab796/contourpy-1.3.3-cp311-cp311-win32.whl", hash = "sha256:fd907ae12cd483cd83e414b12941c632a969171bf90fc937d0c9f268a31cafff", size = 183677, upload-time = "2025-07-26T12:01:17.088Z" }, + { url = "https://files.pythonhosted.org/packages/98/4b/9bd370b004b5c9d8045c6c33cf65bae018b27aca550a3f657cdc99acdbd8/contourpy-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:3519428f6be58431c56581f1694ba8e50626f2dd550af225f82fb5f5814d2a42", size = 225234, upload-time = "2025-07-26T12:01:18.256Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b6/71771e02c2e004450c12b1120a5f488cad2e4d5b590b1af8bad060360fe4/contourpy-1.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:15ff10bfada4bf92ec8b31c62bf7c1834c244019b4a33095a68000d7075df470", size = 193123, upload-time = "2025-07-26T12:01:19.848Z" }, + { url = "https://files.pythonhosted.org/packages/be/45/adfee365d9ea3d853550b2e735f9d66366701c65db7855cd07621732ccfc/contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb", size = 293419, upload-time = "2025-07-26T12:01:21.16Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/3e/405b59cfa13021a56bba395a6b3aca8cec012b45bf177b0eaf7a202cde2c/contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6", size = 273979, upload-time = "2025-07-26T12:01:22.448Z" }, + { url = "https://files.pythonhosted.org/packages/d4/1c/a12359b9b2ca3a845e8f7f9ac08bdf776114eb931392fcad91743e2ea17b/contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7", size = 332653, upload-time = "2025-07-26T12:01:24.155Z" }, + { url = "https://files.pythonhosted.org/packages/63/12/897aeebfb475b7748ea67b61e045accdfcf0d971f8a588b67108ed7f5512/contourpy-1.3.3-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2e8faa0ed68cb29af51edd8e24798bb661eac3bd9f65420c1887b6ca89987c8", size = 379536, upload-time = "2025-07-26T12:01:25.91Z" }, + { url = "https://files.pythonhosted.org/packages/43/8a/a8c584b82deb248930ce069e71576fc09bd7174bbd35183b7943fb1064fd/contourpy-1.3.3-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:626d60935cf668e70a5ce6ff184fd713e9683fb458898e4249b63be9e28286ea", size = 384397, upload-time = "2025-07-26T12:01:27.152Z" }, + { url = "https://files.pythonhosted.org/packages/cc/8f/ec6289987824b29529d0dfda0d74a07cec60e54b9c92f3c9da4c0ac732de/contourpy-1.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d00e655fcef08aba35ec9610536bfe90267d7ab5ba944f7032549c55a146da1", size = 362601, upload-time = "2025-07-26T12:01:28.808Z" }, + { url = "https://files.pythonhosted.org/packages/05/0a/a3fe3be3ee2dceb3e615ebb4df97ae6f3828aa915d3e10549ce016302bd1/contourpy-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:451e71b5a7d597379ef572de31eeb909a87246974d960049a9848c3bc6c41bf7", size = 1331288, upload-time = "2025-07-26T12:01:31.198Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/1d/acad9bd4e97f13f3e2b18a3977fe1b4a37ecf3d38d815333980c6c72e963/contourpy-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:459c1f020cd59fcfe6650180678a9993932d80d44ccde1fa1868977438f0b411", size = 1403386, upload-time = "2025-07-26T12:01:33.947Z" }, + { url = "https://files.pythonhosted.org/packages/cf/8f/5847f44a7fddf859704217a99a23a4f6417b10e5ab1256a179264561540e/contourpy-1.3.3-cp312-cp312-win32.whl", hash = "sha256:023b44101dfe49d7d53932be418477dba359649246075c996866106da069af69", size = 185018, upload-time = "2025-07-26T12:01:35.64Z" }, + { url = "https://files.pythonhosted.org/packages/19/e8/6026ed58a64563186a9ee3f29f41261fd1828f527dd93d33b60feca63352/contourpy-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:8153b8bfc11e1e4d75bcb0bff1db232f9e10b274e0929de9d608027e0d34ff8b", size = 226567, upload-time = "2025-07-26T12:01:36.804Z" }, + { url = "https://files.pythonhosted.org/packages/d1/e2/f05240d2c39a1ed228d8328a78b6f44cd695f7ef47beb3e684cf93604f86/contourpy-1.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:07ce5ed73ecdc4a03ffe3e1b3e3c1166db35ae7584be76f65dbbe28a7791b0cc", size = 193655, upload-time = "2025-07-26T12:01:37.999Z" }, + { url = "https://files.pythonhosted.org/packages/a5/29/8dcfe16f0107943fa92388c23f6e05cff0ba58058c4c95b00280d4c75a14/contourpy-1.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cd5dfcaeb10f7b7f9dc8941717c6c2ade08f587be2226222c12b25f0483ed497", size = 278809, upload-time = "2025-07-26T12:02:52.74Z" }, + { url = "https://files.pythonhosted.org/packages/85/a9/8b37ef4f7dafeb335daee3c8254645ef5725be4d9c6aa70b50ec46ef2f7e/contourpy-1.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0c1fc238306b35f246d61a1d416a627348b5cf0648648a031e14bb8705fcdfe8", size = 261593, upload-time = "2025-07-26T12:02:54.037Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/59/ebfb8c677c75605cc27f7122c90313fd2f375ff3c8d19a1694bda74aaa63/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f9aad7de812d6541d29d2bbf8feb22ff7e1c299523db288004e3157ff4674e", size = 302202, upload-time = "2025-07-26T12:02:55.947Z" }, + { url = "https://files.pythonhosted.org/packages/3c/37/21972a15834d90bfbfb009b9d004779bd5a07a0ec0234e5ba8f64d5736f4/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ed3657edf08512fc3fe81b510e35c2012fbd3081d2e26160f27ca28affec989", size = 329207, upload-time = "2025-07-26T12:02:57.468Z" }, + { url = "https://files.pythonhosted.org/packages/0c/58/bd257695f39d05594ca4ad60df5bcb7e32247f9951fd09a9b8edb82d1daa/contourpy-1.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3d1a3799d62d45c18bafd41c5fa05120b96a28079f2393af559b843d1a966a77", size = 225315, upload-time = "2025-07-26T12:02:58.801Z" }, +] + [[package]] name = "cssselect2" -version = "0.8.0" +version = "0.9.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "tinycss2" }, { name = "webencodings" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/86/fd7f58fc498b3166f3a7e8e0cddb6e620fe1da35b02248b1bd59e95dbaaa/cssselect2-0.8.0.tar.gz", hash = "sha256:7674ffb954a3b46162392aee2a3a0aedb2e14ecf99fcc28644900f4e6e3e9d3a", size = 35716, upload-time = "2025-03-05T14:46:07.988Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/20/92eaa6b0aec7189fa4b75c890640e076e9e793095721db69c5c81142c2e1/cssselect2-0.9.0.tar.gz", hash = "sha256:759aa22c216326356f65e62e791d66160a0f9c91d1424e8d8adc5e74dddfc6fb", size = 35595, upload-time = "2026-02-12T17:16:39.614Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/21/0e/8459ca4413e1a21a06c97d134bfaf18adfd27cea068813dc0faae06cbf00/cssselect2-0.9.0-py3-none-any.whl", hash = 
"sha256:6a99e5f91f9a016a304dd929b0966ca464bcfda15177b6fb4a118fc0fb5d9563", size = 15453, upload-time = "2026-02-12T17:16:38.317Z" }, +] + +[[package]] +name = "cuda-bindings" +version = "12.9.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cuda-pathfinder", marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/e7/aa315e6a749d9b96c2504a1ba0ba031ba2d0517e972ce22682e3fccecb09/cssselect2-0.8.0-py3-none-any.whl", hash = "sha256:46fc70ebc41ced7a32cd42d58b1884d72ade23d21e5a4eaaf022401c13f0e76e", size = 15454, upload-time = "2025-03-05T14:46:06.463Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/9d/dd87e1071bcb2e438c14e2e4497aa0037faf2c9775ac1d172f578f448668/cuda_bindings-12.9.6-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bb2f1eedc8f65902b34e807c21a3b7c922dc8de1f51d0829ecbb5c6a5e9c5ff1", size = 7094433, upload-time = "2026-03-11T14:47:22.811Z" }, + { url = "https://files.pythonhosted.org/packages/8c/1d/5631df2faa5e5f6bd3e8fef098d6fc1b7c6f38811821332ef28ad82ce0d4/cuda_bindings-12.9.6-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9f9031e7a265e74f1517668139987253552d1677d995da4b0d990aa19b9b9b0", size = 7626833, upload-time = "2026-03-11T14:47:25.046Z" }, + { url = "https://files.pythonhosted.org/packages/1b/76/d1783a73719c3e083305766b097115c21311a0e6c939af99910826419e99/cuda_bindings-12.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:69e820e72af29bac65cf821a0a7b2546ef4cca5685640739a828c00ef91fdbef", size = 7148460, upload-time = "2026-03-11T14:47:27.469Z" }, + { url = "https://files.pythonhosted.org/packages/1f/a5/e9d37c10f6c27c9c65d53c6cd6d9763e1df99c004780585fc2ad9041fbe3/cuda_bindings-12.9.6-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2662f59db67d9aeaf8959c593c91f600792c2970cf02cae2814387fc687b115a", size = 7090971, upload-time = "2026-03-11T14:47:29.526Z" }, + { url = "https://files.pythonhosted.org/packages/66/d5/bd4c03e9516d3cf788a270debe28d687e5c48b13a9931599bbddf01de302/cuda_bindings-12.9.6-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8519707644ea630a365b101703a9136f4cb144760cc2c73281c38a05e07d08d", size = 7618785, upload-time = "2026-03-11T14:47:31.531Z" }, + { url = "https://files.pythonhosted.org/packages/ca/7b/178b040b35638e93a601aabc6061d52150f6685c7520536b4e7e108db5f9/cuda_bindings-12.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:e0ac0a4facdb9a6563984ae4917c7a658cbc6a5d0feb858e5a79ba4047c36397", size = 7175051, upload-time = "2026-03-11T14:47:33.213Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/04/8a4d45dc154a8a32982658cc55be291e9778d1197834b15d33427e2f65c1/cuda_bindings-12.9.6-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ea331bc47d9988cc61f0ecc5fa8df9dd188b4493ae1c6688bb1ee8ce8ba1af4", size = 7050347, upload-time = "2026-03-11T14:47:35.221Z" }, + { url = "https://files.pythonhosted.org/packages/3b/69/4b0375e1b120dfa7427c31c8420cfdee596ecd03955fd291a96116fa375d/cuda_bindings-12.9.6-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b2b54b95a47104eff56b5155818ab5790e3ccdba8dd51e2928ae56782aaf5b02", size = 7590574, upload-time = "2026-03-11T14:47:37.452Z" }, + { url = "https://files.pythonhosted.org/packages/a4/35/71b818233e1ea503face2a0e6f6f2c73ca02b946ca9613104667ba4a8454/cuda_bindings-12.9.6-cp312-cp312-win_amd64.whl", hash = "sha256:407b85671c363a5ddf77cd4bdeb05355340a88ac2cd0c6adc1a0f4b4d11c13c2", size = 7364562, upload-time = "2026-03-11T14:47:39.188Z" }, +] + +[[package]] +name = "cuda-pathfinder" +version = "1.5.2" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f2/f9/1b9b60a30fc463c14cdea7a77228131a0ccc89572e8df9cb86c9648271ab/cuda_pathfinder-1.5.2-py3-none-any.whl", hash = "sha256:0c5f160a7756c5b072723cbbd6d861e38917ef956c68150b02f0b6e9271c71fa", size = 49988, upload-time = "2026-04-06T23:01:05.17Z" }, +] + +[[package]] +name = "cuda-toolkit" +version = "12.8.1" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d4/c8/7dce3a0b15b42a3b58e7d96eb22a687d3bf2c44e01d149a6874629cd9938/cuda_toolkit-12.8.1-py2.py3-none-any.whl", hash = "sha256:adc7906af4ecbf9a352f9dca5734eceb21daec281ccfcf5675e1d2f724fc2cba", size = 2283, upload-time = "2025-08-13T02:03:07.842Z" }, +] + +[package.optional-dependencies] +cublas = [ + { name = "nvidia-cublas-cu12", version = "12.8.4.1", source = { registry = "https://pypi.org/simple" }, marker = 
"(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +cudart = [ + { name = "nvidia-cuda-runtime-cu12", version = "12.8.90", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or 
(sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +cufft = [ + { name = "nvidia-cufft-cu12", version = "11.3.3.83", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +cufile = [ + { name = "nvidia-cufile-cu12", marker = "(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +cupti = [ + { name = "nvidia-cuda-cupti-cu12", version = "12.8.90", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +curand = [ + { name = "nvidia-curand-cu12", version = "10.3.9.90", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +cusolver = [ + { name = "nvidia-cusolver-cu12", version = "11.7.3.90", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +cusparse = [ + { name = "nvidia-cusparse-cu12", version = "12.5.8.93", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' 
and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +nvjitlink = [ + { name = "nvidia-nvjitlink-cu12", version = "12.8.93", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' 
and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +nvrtc = [ + { name = "nvidia-cuda-nvrtc-cu12", version = "12.8.93", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +nvtx = [ + { name = "nvidia-nvtx-cu12", version = "12.8.90", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] [[package]] @@ -499,25 +2235,52 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321, upload-time = "2023-10-07T05:32:16.783Z" }, ] +[[package]] +name = "datasets" +version = "4.8.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dill" }, + { name = "filelock" }, + { name = "fsspec", version = "2026.2.0", source = { registry = "https://pypi.org/simple" }, extra = ["http"], marker = "extra == 'extra-16-inference-models-falcon-perception' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "httpx" }, + { name = "huggingface-hub" }, + { name = "multiprocess" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "packaging" }, + { name = "pandas", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pandas", version = "3.0.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pyarrow" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "tqdm" }, + { name = "xxhash" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/22/22/73e46ac7a8c25e7ef0b3bd6f10da3465021d90219a32eb0b4d2afea4c56e/datasets-4.8.4.tar.gz", hash = "sha256:a1429ed853275ce7943a01c6d2e25475b4501eb758934362106a280470df3a52", size = 604382, upload-time = "2026-03-23T14:21:17.987Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/e5/247d094108e42ac26363ab8dc57f168840cf7c05774b40ffeb0d78868fcc/datasets-4.8.4-py3-none-any.whl", hash = "sha256:cdc8bee4698e549d78bf1fed6aea2eebc760b22b084f07e6fc020c6577a6ce6d", size = 526991, upload-time = "2026-03-23T14:21:15.89Z" }, +] + [[package]] name = "debugpy" -version = "1.8.19" +version = "1.8.20" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/73/75/9e12d4d42349b817cd545b89247696c67917aab907012ae5b64bbfea3199/debugpy-1.8.19.tar.gz", hash = "sha256:eea7e5987445ab0b5ed258093722d5ecb8bb72217c5c9b1e21f64efe23ddebdb", size = 1644590, upload-time = "2025-12-15T21:53:28.044Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/e0/b7/cd8080344452e4874aae67c40d8940e2b4d47b01601a8fd9f44786c757c7/debugpy-1.8.20.tar.gz", hash = "sha256:55bc8701714969f1ab89a6d5f2f3d40c36f91b2cbe2f65d98bf8196f6a6a2c33", size = 1645207, upload-time = "2026-01-29T23:03:28.199Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/98/d57054371887f37d3c959a7a8dc3c76b763acb65f5e78d849d7db7cadc5b/debugpy-1.8.19-cp310-cp310-macosx_15_0_x86_64.whl", hash = "sha256:fce6da15d73be5935b4438435c53adb512326a3e11e4f90793ea87cd9f018254", size = 2098493, upload-time = "2025-12-15T21:53:30.149Z" }, - { url = "https://files.pythonhosted.org/packages/ee/dd/c517b9aa3500157a30e4f4c4f5149f880026bd039d2b940acd2383a85d8e/debugpy-1.8.19-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:e24b1652a1df1ab04d81e7ead446a91c226de704ff5dde6bd0a0dbaab07aa3f2", size = 3087875, upload-time = "2025-12-15T21:53:31.511Z" }, - { url = "https://files.pythonhosted.org/packages/d8/57/3d5a5b0da9b63445253107ead151eff29190c6ad7440c68d1a59d56613aa/debugpy-1.8.19-cp310-cp310-win32.whl", hash = "sha256:327cb28c3ad9e17bc925efc7f7018195fd4787c2fe4b7af1eec11f1d19bdec62", size = 5239378, upload-time = "2025-12-15T21:53:32.979Z" }, - { url = "https://files.pythonhosted.org/packages/a6/36/7f9053c4c549160c87ae7e43800138f2695578c8b65947114c97250983b6/debugpy-1.8.19-cp310-cp310-win_amd64.whl", hash = "sha256:b7dd275cf2c99e53adb9654f5ae015f70415bbe2bacbe24cfee30d54b6aa03c5", size = 5271129, upload-time = "2025-12-15T21:53:35.085Z" }, - { url = "https://files.pythonhosted.org/packages/80/e2/48531a609b5a2aa94c6b6853afdfec8da05630ab9aaa96f1349e772119e9/debugpy-1.8.19-cp311-cp311-macosx_15_0_universal2.whl", hash = "sha256:c5dcfa21de1f735a4f7ced4556339a109aa0f618d366ede9da0a3600f2516d8b", size = 2207620, upload-time = "2025-12-15T21:53:37.1Z" }, - { url = "https://files.pythonhosted.org/packages/1b/d4/97775c01d56071969f57d93928899e5616a4cfbbf4c8cc75390d3a51c4a4/debugpy-1.8.19-cp311-cp311-manylinux_2_34_x86_64.whl", 
hash = "sha256:806d6800246244004625d5222d7765874ab2d22f3ba5f615416cf1342d61c488", size = 3170796, upload-time = "2025-12-15T21:53:38.513Z" }, - { url = "https://files.pythonhosted.org/packages/8d/7e/8c7681bdb05be9ec972bbb1245eb7c4c7b0679bb6a9e6408d808bc876d3d/debugpy-1.8.19-cp311-cp311-win32.whl", hash = "sha256:783a519e6dfb1f3cd773a9bda592f4887a65040cb0c7bd38dde410f4e53c40d4", size = 5164287, upload-time = "2025-12-15T21:53:40.857Z" }, - { url = "https://files.pythonhosted.org/packages/f2/a8/aaac7ff12ddf5d68a39e13a423a8490426f5f661384f5ad8d9062761bd8e/debugpy-1.8.19-cp311-cp311-win_amd64.whl", hash = "sha256:14035cbdbb1fe4b642babcdcb5935c2da3b1067ac211c5c5a8fdc0bb31adbcaa", size = 5188269, upload-time = "2025-12-15T21:53:42.359Z" }, - { url = "https://files.pythonhosted.org/packages/4a/15/d762e5263d9e25b763b78be72dc084c7a32113a0bac119e2f7acae7700ed/debugpy-1.8.19-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:bccb1540a49cde77edc7ce7d9d075c1dbeb2414751bc0048c7a11e1b597a4c2e", size = 2549995, upload-time = "2025-12-15T21:53:43.773Z" }, - { url = "https://files.pythonhosted.org/packages/a7/88/f7d25c68b18873b7c53d7c156ca7a7ffd8e77073aa0eac170a9b679cf786/debugpy-1.8.19-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:e9c68d9a382ec754dc05ed1d1b4ed5bd824b9f7c1a8cd1083adb84b3c93501de", size = 4309891, upload-time = "2025-12-15T21:53:45.26Z" }, - { url = "https://files.pythonhosted.org/packages/c5/4f/a65e973aba3865794da65f71971dca01ae66666132c7b2647182d5be0c5f/debugpy-1.8.19-cp312-cp312-win32.whl", hash = "sha256:6599cab8a783d1496ae9984c52cb13b7c4a3bd06a8e6c33446832a5d97ce0bee", size = 5286355, upload-time = "2025-12-15T21:53:46.763Z" }, - { url = "https://files.pythonhosted.org/packages/d8/3a/d3d8b48fec96e3d824e404bf428276fb8419dfa766f78f10b08da1cb2986/debugpy-1.8.19-cp312-cp312-win_amd64.whl", hash = "sha256:66e3d2fd8f2035a8f111eb127fa508469dfa40928a89b460b41fd988684dc83d", size = 5328239, upload-time = "2025-12-15T21:53:48.868Z" }, - { url = 
"https://files.pythonhosted.org/packages/25/3e/e27078370414ef35fafad2c06d182110073daaeb5d3bf734b0b1eeefe452/debugpy-1.8.19-py2.py3-none-any.whl", hash = "sha256:360ffd231a780abbc414ba0f005dad409e71c78637efe8f2bd75837132a41d38", size = 5292321, upload-time = "2025-12-15T21:54:16.024Z" }, + { url = "https://files.pythonhosted.org/packages/71/be/8bd693a0b9d53d48c8978fa5d889e06f3b5b03e45fd1ea1e78267b4887cb/debugpy-1.8.20-cp310-cp310-macosx_15_0_x86_64.whl", hash = "sha256:157e96ffb7f80b3ad36d808646198c90acb46fdcfd8bb1999838f0b6f2b59c64", size = 2099192, upload-time = "2026-01-29T23:03:29.707Z" }, + { url = "https://files.pythonhosted.org/packages/77/1b/85326d07432086a06361d493d2743edd0c4fc2ef62162be7f8618441ac37/debugpy-1.8.20-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:c1178ae571aff42e61801a38b007af504ec8e05fde1c5c12e5a7efef21009642", size = 3088568, upload-time = "2026-01-29T23:03:31.467Z" }, + { url = "https://files.pythonhosted.org/packages/e8/60/3e08462ee3eccd10998853eb35947c416e446bfe2bc37dbb886b9044586c/debugpy-1.8.20-cp310-cp310-win32.whl", hash = "sha256:c29dd9d656c0fbd77906a6e6a82ae4881514aa3294b94c903ff99303e789b4a2", size = 5284399, upload-time = "2026-01-29T23:03:33.678Z" }, + { url = "https://files.pythonhosted.org/packages/72/43/09d49106e770fe558ced5e80df2e3c2ebee10e576eda155dcc5670473663/debugpy-1.8.20-cp310-cp310-win_amd64.whl", hash = "sha256:3ca85463f63b5dd0aa7aaa933d97cbc47c174896dcae8431695872969f981893", size = 5316388, upload-time = "2026-01-29T23:03:35.095Z" }, + { url = "https://files.pythonhosted.org/packages/51/56/c3baf5cbe4dd77427fd9aef99fcdade259ad128feeb8a786c246adb838e5/debugpy-1.8.20-cp311-cp311-macosx_15_0_universal2.whl", hash = "sha256:eada6042ad88fa1571b74bd5402ee8b86eded7a8f7b827849761700aff171f1b", size = 2208318, upload-time = "2026-01-29T23:03:36.481Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/7d/4fa79a57a8e69fe0d9763e98d1110320f9ecd7f1f362572e3aafd7417c9d/debugpy-1.8.20-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:7de0b7dfeedc504421032afba845ae2a7bcc32ddfb07dae2c3ca5442f821c344", size = 3171493, upload-time = "2026-01-29T23:03:37.775Z" }, + { url = "https://files.pythonhosted.org/packages/7d/f2/1e8f8affe51e12a26f3a8a8a4277d6e60aa89d0a66512f63b1e799d424a4/debugpy-1.8.20-cp311-cp311-win32.whl", hash = "sha256:773e839380cf459caf73cc533ea45ec2737a5cc184cf1b3b796cd4fd98504fec", size = 5209240, upload-time = "2026-01-29T23:03:39.109Z" }, + { url = "https://files.pythonhosted.org/packages/d5/92/1cb532e88560cbee973396254b21bece8c5d7c2ece958a67afa08c9f10dc/debugpy-1.8.20-cp311-cp311-win_amd64.whl", hash = "sha256:1f7650546e0eded1902d0f6af28f787fa1f1dbdbc97ddabaf1cd963a405930cb", size = 5233481, upload-time = "2026-01-29T23:03:40.659Z" }, + { url = "https://files.pythonhosted.org/packages/14/57/7f34f4736bfb6e00f2e4c96351b07805d83c9a7b33d28580ae01374430f7/debugpy-1.8.20-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:4ae3135e2089905a916909ef31922b2d733d756f66d87345b3e5e52b7a55f13d", size = 2550686, upload-time = "2026-01-29T23:03:42.023Z" }, + { url = "https://files.pythonhosted.org/packages/ab/78/b193a3975ca34458f6f0e24aaf5c3e3da72f5401f6054c0dfd004b41726f/debugpy-1.8.20-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:88f47850a4284b88bd2bfee1f26132147d5d504e4e86c22485dfa44b97e19b4b", size = 4310588, upload-time = "2026-01-29T23:03:43.314Z" }, + { url = "https://files.pythonhosted.org/packages/c1/55/f14deb95eaf4f30f07ef4b90a8590fc05d9e04df85ee379712f6fb6736d7/debugpy-1.8.20-cp312-cp312-win32.whl", hash = "sha256:4057ac68f892064e5f98209ab582abfee3b543fb55d2e87610ddc133a954d390", size = 5331372, upload-time = "2026-01-29T23:03:45.526Z" }, + { url = "https://files.pythonhosted.org/packages/a1/39/2bef246368bd42f9bd7cba99844542b74b84dacbdbea0833e610f384fee8/debugpy-1.8.20-cp312-cp312-win_amd64.whl", 
hash = "sha256:a1a8f851e7cf171330679ef6997e9c579ef6dd33c9098458bd9986a0f4ca52e3", size = 5372835, upload-time = "2026-01-29T23:03:47.245Z" }, + { url = "https://files.pythonhosted.org/packages/e0/c3/7f67dea8ccf8fdcb9c99033bbe3e90b9e7395415843accb81428c441be2d/debugpy-1.8.20-py2.py3-none-any.whl", hash = "sha256:5be9bed9ae3be00665a06acaa48f8329d2b9632f15fd09f6a9a8c8d9907e54d7", size = 5337658, upload-time = "2026-01-29T23:04:17.404Z" }, ] [[package]] @@ -538,42 +2301,62 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" }, ] +[[package]] +name = "dill" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/81/e1/56027a71e31b02ddc53c7d65b01e68edf64dea2932122fe7746a516f75d5/dill-0.4.1.tar.gz", hash = "sha256:423092df4182177d4d8ba8290c8a5b640c66ab35ec7da59ccfa00f6fa3eea5fa", size = 187315, upload-time = "2026-01-19T02:36:56.85Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/77/dc8c558f7593132cf8fefec57c4f60c83b16941c574ac5f619abb3ae7933/dill-0.4.1-py3-none-any.whl", hash = "sha256:1e1ce33e978ae97fcfcff5638477032b801c46c7c65cf717f95fbc2248f79a9d", size = 120019, upload-time = "2026-01-19T02:36:55.663Z" }, +] + [[package]] name = "docopt" version = "0.6.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a2/55/8f8cab2afd404cf578136ef2cc5dfb50baa1761b68c9da1fb1e4eed343c9/docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491", size = 25901, upload-time = "2014-06-16T11:18:57.406Z" } +[[package]] +name = "docstring-parser" +version = "0.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442, upload-time = "2025-07-21T07:35:01.868Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" }, +] + [[package]] name = "easyocr" version = "1.7.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ninja" }, - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra 
== 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or 
(python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "opencv-python-headless" }, { name = "pillow" }, { name = "pyclipper" }, { name = "python-bidi" }, { name = "pyyaml" }, { name = "scikit-image" }, - { name = "scipy" }, + { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "shapely" }, - { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra 
!= 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = 
"(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra 
!= 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = 
"torch", version = "2.8.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", 
version = "2.8.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and 
extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.21.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or 
(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 
'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' 
and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and 
extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') 
or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = 
"0.22.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.23.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.23.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
== 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.21.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.22.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/bb/84/4a2cab0e6adde6a85e7ba543862e5fc0250c51f3ac721a078a55cdcff250/easyocr-1.7.2-py3-none-any.whl", hash = "sha256:5be12f9b0e595d443c9c3d10b0542074b50f0ec2d98b141a109cd961fd1c177c", size = 2870178, upload-time = "2024-09-24T11:34:43.554Z" }, @@ -581,32 +2364,32 @@ wheels = [ [[package]] name = "einops" -version = "0.8.1" +version = "0.8.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e5/81/df4fbe24dff8ba3934af99044188e20a98ed441ad17a274539b74e82e126/einops-0.8.1.tar.gz", hash = "sha256:de5d960a7a761225532e0f1959e5315ebeafc0cd43394732f103ca44b9837e84", size = 54805, upload-time = "2025-02-09T03:17:00.434Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/77/850bef8d72ffb9219f0b1aac23fbc1bf7d038ee6ea666f331fa273031aa2/einops-0.8.2.tar.gz", hash = "sha256:609da665570e5e265e27283aab09e7f279ade90c4f01bcfca111f3d3e13f2827", size = 56261, upload-time = "2026-01-26T04:13:17.638Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/62/9773de14fe6c45c23649e98b83231fffd7b9892b6cf863251dc2afa73643/einops-0.8.1-py3-none-any.whl", hash = "sha256:919387eb55330f5757c6bea9165c5ff5cfe63a642682ea788a6d472576d81737", size = 64359, upload-time = "2025-02-09T03:17:01.998Z" }, + { url = "https://files.pythonhosted.org/packages/2a/09/f8d8f8f31e4483c10a906437b4ce31bdf3d6d417b73fe33f1a8b59e34228/einops-0.8.2-py3-none-any.whl", hash = 
"sha256:54058201ac7087911181bfec4af6091bb59380360f069276601256a76af08193", size = 65638, upload-time = "2026-01-26T04:13:18.546Z" }, ] [[package]] name = "exceptiongroup" -version = "1.3.0" +version = "1.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "typing-extensions", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') 
or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, + { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = 
"sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" }, ] [[package]] name = "execnet" -version = "2.1.1" +version = "2.1.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bb/ff/b4c0dc78fbe20c3e59c0c7334de0c27eb4001a2b2017999af398bf730817/execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3", size = 166524, upload-time = "2024-04-08T09:04:19.245Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bf/89/780e11f9588d9e7128a3f87788354c7946a9cbb1401ad38a48c4db9a4f07/execnet-2.1.2.tar.gz", hash = "sha256:63d83bfdd9a23e35b9c6a3261412324f964c2ec8dcd8d3c6916ee9373e0befcd", size = 166622, upload-time = "2025-11-12T09:56:37.75Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/09/2aea36ff60d16dd8879bdb2f5b3ee0ba8d08cbbdcdfe870e695ce3784385/execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc", size = 40612, upload-time = "2024-04-08T09:04:17.414Z" }, + { url = "https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl", hash = "sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec", size = 40708, upload-time = "2025-11-12T09:56:36.333Z" }, ] [[package]] @@ -618,6 +2401,37 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" }, ] +[[package]] +name = "falcon-perception" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "datasets" }, + { name = "einops" }, + { name = "hf-transfer" }, + { name = "hf-xet" }, + { name 
= "mlx", marker = "(platform_machine == 'arm64' and sys_platform == 'darwin') or (platform_machine != 'arm64' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (platform_machine != 'arm64' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (platform_machine != 'arm64' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (platform_machine != 'arm64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, 
marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "opencv-python", version = "4.13.0.92", source = { registry = "https://pypi.org/simple" } }, + { name = "pillow" }, + { name = "pycocotools" }, + { name = "requests" }, + { name = "safetensors" }, + { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = 
"(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "tokenizers" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or 
(extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "tyro" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/2f/56f6488722cce050fe03c86ea0fb6fd594382888cdc196e37db5c4b29751/falcon_perception-1.0.0.tar.gz", hash = "sha256:359c0561500b5e62f291ef5c8dfe22d38a355bbd36f3e884ebf2400553c53a30", size = 123613, upload-time = "2026-04-07T12:03:53.483Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/76/12ede62843f1b046b9d5f47dee4698939722b7b1401681dce4c4e867cf41/falcon_perception-1.0.0-py3-none-any.whl", hash = "sha256:19ee27753b5429ae0d3f8aa393c5e60ff2d42ead5b51da268a7ffb9c0be393a6", size = 132262, upload-time = "2026-04-07T12:03:51.835Z" }, +] + [[package]] name = "fastjsonschema" version = "2.21.2" @@ -629,62 +2443,174 @@ wheels = [ [[package]] name = "filelock" -version = "3.18.0" +version = "3.25.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075, upload-time = "2025-03-14T07:11:40.47Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/b8/00651a0f559862f3bb7d6f7477b192afe3f583cc5e26403b44e59a55ab34/filelock-3.25.2.tar.gz", hash = "sha256:b64ece2b38f4ca29dd3e810287aa8c48182bbecd1ae6e9ae126c9b35f1382694", size = 40480, upload-time = "2026-03-11T20:45:38.487Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = 
"sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a5/842ae8f0c08b61d6484b52f99a03510a3a72d23141942d216ebe81fefbce/filelock-3.25.2-py3-none-any.whl", hash = "sha256:ca8afb0da15f229774c9ad1b455ed96e85a81373065fb10446672f64444ddf70", size = 26759, upload-time = "2026-03-11T20:45:37.437Z" }, ] [[package]] name = "flatbuffers" -version = "25.2.10" +version = "25.12.19" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/30/eb5dce7994fc71a2f685d98ec33cc660c0a5887db5610137e60d8cbc4489/flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e", size = 22170, upload-time = "2025-02-11T04:26:46.257Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/25/155f9f080d5e4bc0082edfda032ea2bc2b8fab3f4d25d46c1e9dd22a1a89/flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051", size = 30953, upload-time = "2025-02-11T04:26:44.484Z" }, + { url = "https://files.pythonhosted.org/packages/e8/2d/d2a548598be01649e2d46231d151a6c56d10b964d94043a335ae56ea2d92/flatbuffers-25.12.19-py2.py3-none-any.whl", hash = "sha256:7634f50c427838bb021c2d66a3d1168e9d199b0607e6329399f04846d42e20b4", size = 26661, upload-time = "2025-12-19T23:16:13.622Z" }, ] [[package]] name = "fonttools" -version = "4.58.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2e/5a/1124b2c8cb3a8015faf552e92714040bcdbc145dfa29928891b02d147a18/fonttools-4.58.4.tar.gz", hash = "sha256:928a8009b9884ed3aae17724b960987575155ca23c6f0b8146e400cc9e0d44ba", size = 3525026, upload-time = "2025-06-13T17:25:15.426Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/ed/86/d22c24caa574449b56e994ed1a96d23b23af85557fb62a92df96439d3f6c/fonttools-4.58.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:834542f13fee7625ad753b2db035edb674b07522fcbdd0ed9e9a9e2a1034467f", size = 2748349, upload-time = "2025-06-13T17:23:49.179Z" }, - { url = "https://files.pythonhosted.org/packages/f9/b8/384aca93856def00e7de30341f1e27f439694857d82c35d74a809c705ed0/fonttools-4.58.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2e6c61ce330142525296170cd65666e46121fc0d44383cbbcfa39cf8f58383df", size = 2318565, upload-time = "2025-06-13T17:23:52.144Z" }, - { url = "https://files.pythonhosted.org/packages/1a/f2/273edfdc8d9db89ecfbbf659bd894f7e07b6d53448b19837a4bdba148d17/fonttools-4.58.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9c75f8faa29579c0fbf29b56ae6a3660c6c025f3b671803cb6a9caa7e4e3a98", size = 4838855, upload-time = "2025-06-13T17:23:54.039Z" }, - { url = "https://files.pythonhosted.org/packages/13/fa/403703548c093c30b52ab37e109b369558afa221130e67f06bef7513f28a/fonttools-4.58.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:88dedcedbd5549e35b2ea3db3de02579c27e62e51af56779c021e7b33caadd0e", size = 4767637, upload-time = "2025-06-13T17:23:56.17Z" }, - { url = "https://files.pythonhosted.org/packages/6e/a8/3380e1e0bff6defb0f81c9abf274a5b4a0f30bc8cab4fd4e346c6f923b4c/fonttools-4.58.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae80a895adab43586f4da1521d58fd4f4377cef322ee0cc205abcefa3a5effc3", size = 4819397, upload-time = "2025-06-13T17:23:58.263Z" }, - { url = "https://files.pythonhosted.org/packages/cd/1b/99e47eb17a8ca51d808622a4658584fa8f340857438a4e9d7ac326d4a041/fonttools-4.58.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0d3acc7f0d151da116e87a182aefb569cf0a3c8e0fd4c9cd0a7c1e7d3e7adb26", size = 4926641, upload-time = "2025-06-13T17:24:00.368Z" }, - { url = 
"https://files.pythonhosted.org/packages/31/75/415254408f038e35b36c8525fc31feb8561f98445688dd2267c23eafd7a2/fonttools-4.58.4-cp310-cp310-win32.whl", hash = "sha256:1244f69686008e7e8d2581d9f37eef330a73fee3843f1107993eb82c9d306577", size = 2201917, upload-time = "2025-06-13T17:24:02.587Z" }, - { url = "https://files.pythonhosted.org/packages/c5/69/f019a15ed2946317c5318e1bcc8876f8a54a313848604ad1d4cfc4c07916/fonttools-4.58.4-cp310-cp310-win_amd64.whl", hash = "sha256:2a66c0af8a01eb2b78645af60f3b787de5fe5eb1fd8348163715b80bdbfbde1f", size = 2246327, upload-time = "2025-06-13T17:24:04.087Z" }, - { url = "https://files.pythonhosted.org/packages/17/7b/cc6e9bb41bab223bd2dc70ba0b21386b85f604e27f4c3206b4205085a2ab/fonttools-4.58.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3841991c9ee2dc0562eb7f23d333d34ce81e8e27c903846f0487da21e0028eb", size = 2768901, upload-time = "2025-06-13T17:24:05.901Z" }, - { url = "https://files.pythonhosted.org/packages/3d/15/98d75df9f2b4e7605f3260359ad6e18e027c11fa549f74fce567270ac891/fonttools-4.58.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c98f91b6a9604e7ffb5ece6ea346fa617f967c2c0944228801246ed56084664", size = 2328696, upload-time = "2025-06-13T17:24:09.18Z" }, - { url = "https://files.pythonhosted.org/packages/a8/c8/dc92b80f5452c9c40164e01b3f78f04b835a00e673bd9355ca257008ff61/fonttools-4.58.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab9f891eb687ddf6a4e5f82901e00f992e18012ca97ab7acd15f13632acd14c1", size = 5018830, upload-time = "2025-06-13T17:24:11.282Z" }, - { url = "https://files.pythonhosted.org/packages/19/48/8322cf177680505d6b0b6062e204f01860cb573466a88077a9b795cb70e8/fonttools-4.58.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:891c5771e8f0094b7c0dc90eda8fc75e72930b32581418f2c285a9feedfd9a68", size = 4960922, upload-time = "2025-06-13T17:24:14.9Z" }, - { url = 
"https://files.pythonhosted.org/packages/14/e0/2aff149ed7eb0916de36da513d473c6fff574a7146891ce42de914899395/fonttools-4.58.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:43ba4d9646045c375d22e3473b7d82b18b31ee2ac715cd94220ffab7bc2d5c1d", size = 4997135, upload-time = "2025-06-13T17:24:16.959Z" }, - { url = "https://files.pythonhosted.org/packages/e6/6f/4d9829b29a64a2e63a121cb11ecb1b6a9524086eef3e35470949837a1692/fonttools-4.58.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33d19f16e6d2ffd6669bda574a6589941f6c99a8d5cfb9f464038244c71555de", size = 5108701, upload-time = "2025-06-13T17:24:18.849Z" }, - { url = "https://files.pythonhosted.org/packages/6f/1e/2d656ddd1b0cd0d222f44b2d008052c2689e66b702b9af1cd8903ddce319/fonttools-4.58.4-cp311-cp311-win32.whl", hash = "sha256:b59e5109b907da19dc9df1287454821a34a75f2632a491dd406e46ff432c2a24", size = 2200177, upload-time = "2025-06-13T17:24:20.823Z" }, - { url = "https://files.pythonhosted.org/packages/fb/83/ba71ad053fddf4157cb0697c8da8eff6718d059f2a22986fa5f312b49c92/fonttools-4.58.4-cp311-cp311-win_amd64.whl", hash = "sha256:3d471a5b567a0d1648f2e148c9a8bcf00d9ac76eb89e976d9976582044cc2509", size = 2247892, upload-time = "2025-06-13T17:24:22.927Z" }, - { url = "https://files.pythonhosted.org/packages/04/3c/1d1792bfe91ef46f22a3d23b4deb514c325e73c17d4f196b385b5e2faf1c/fonttools-4.58.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:462211c0f37a278494e74267a994f6be9a2023d0557aaa9ecbcbfce0f403b5a6", size = 2754082, upload-time = "2025-06-13T17:24:24.862Z" }, - { url = "https://files.pythonhosted.org/packages/2a/1f/2b261689c901a1c3bc57a6690b0b9fc21a9a93a8b0c83aae911d3149f34e/fonttools-4.58.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0c7a12fb6f769165547f00fcaa8d0df9517603ae7e04b625e5acb8639809b82d", size = 2321677, upload-time = "2025-06-13T17:24:26.815Z" }, - { url = 
"https://files.pythonhosted.org/packages/fe/6b/4607add1755a1e6581ae1fc0c9a640648e0d9cdd6591cc2d581c2e07b8c3/fonttools-4.58.4-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2d42c63020a922154add0a326388a60a55504629edc3274bc273cd3806b4659f", size = 4896354, upload-time = "2025-06-13T17:24:28.428Z" }, - { url = "https://files.pythonhosted.org/packages/cd/95/34b4f483643d0cb11a1f830b72c03fdd18dbd3792d77a2eb2e130a96fada/fonttools-4.58.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8f2b4e6fd45edc6805f5f2c355590b092ffc7e10a945bd6a569fc66c1d2ae7aa", size = 4941633, upload-time = "2025-06-13T17:24:30.568Z" }, - { url = "https://files.pythonhosted.org/packages/81/ac/9bafbdb7694059c960de523e643fa5a61dd2f698f3f72c0ca18ae99257c7/fonttools-4.58.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f155b927f6efb1213a79334e4cb9904d1e18973376ffc17a0d7cd43d31981f1e", size = 4886170, upload-time = "2025-06-13T17:24:32.724Z" }, - { url = "https://files.pythonhosted.org/packages/ae/44/a3a3b70d5709405f7525bb7cb497b4e46151e0c02e3c8a0e40e5e9fe030b/fonttools-4.58.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e38f687d5de97c7fb7da3e58169fb5ba349e464e141f83c3c2e2beb91d317816", size = 5037851, upload-time = "2025-06-13T17:24:35.034Z" }, - { url = "https://files.pythonhosted.org/packages/21/cb/e8923d197c78969454eb876a4a55a07b59c9c4c46598f02b02411dc3b45c/fonttools-4.58.4-cp312-cp312-win32.whl", hash = "sha256:636c073b4da9db053aa683db99580cac0f7c213a953b678f69acbca3443c12cc", size = 2187428, upload-time = "2025-06-13T17:24:36.996Z" }, - { url = "https://files.pythonhosted.org/packages/46/e6/fe50183b1a0e1018e7487ee740fa8bb127b9f5075a41e20d017201e8ab14/fonttools-4.58.4-cp312-cp312-win_amd64.whl", hash = "sha256:82e8470535743409b30913ba2822e20077acf9ea70acec40b10fcf5671dceb58", size = 2236649, upload-time = "2025-06-13T17:24:38.985Z" }, - { url = 
"https://files.pythonhosted.org/packages/0b/2f/c536b5b9bb3c071e91d536a4d11f969e911dbb6b227939f4c5b0bca090df/fonttools-4.58.4-py3-none-any.whl", hash = "sha256:a10ce13a13f26cbb9f37512a4346bb437ad7e002ff6fa966a7ce7ff5ac3528bd", size = 1114660, upload-time = "2025-06-13T17:25:13.321Z" }, +version = "4.62.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/08/7012b00a9a5874311b639c3920270c36ee0c445b69d9989a85e5c92ebcb0/fonttools-4.62.1.tar.gz", hash = "sha256:e54c75fd6041f1122476776880f7c3c3295ffa31962dc6ebe2543c00dca58b5d", size = 3580737, upload-time = "2026-03-13T13:54:25.52Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/ff/532ed43808b469c807e8cb6b21358da3fe6fd51486b3a8c93db0bb5d957f/fonttools-4.62.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ad5cca75776cd453b1b035b530e943334957ae152a36a88a320e779d61fc980c", size = 2873740, upload-time = "2026-03-13T13:52:11.822Z" }, + { url = "https://files.pythonhosted.org/packages/85/e4/2318d2b430562da7227010fb2bb029d2fa54d7b46443ae8942bab224e2a0/fonttools-4.62.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0b3ae47e8636156a9accff64c02c0924cbebad62854c4a6dbdc110cd5b4b341a", size = 2417649, upload-time = "2026-03-13T13:52:14.605Z" }, + { url = "https://files.pythonhosted.org/packages/4c/28/40f15523b5188598018e7956899fed94eb7debec89e2dd70cb4a8df90492/fonttools-4.62.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9b9e288b4da2f64fd6180644221749de651703e8d0c16bd4b719533a3a7d6e3", size = 4935213, upload-time = "2026-03-13T13:52:17.399Z" }, + { url = "https://files.pythonhosted.org/packages/42/09/7dbe3d7023f57d9b580cfa832109d521988112fd59dddfda3fddda8218f9/fonttools-4.62.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7bca7a1c1faf235ffe25d4f2e555246b4750220b38de8261d94ebc5ce8a23c23", size = 4892374, upload-time = "2026-03-13T13:52:20.175Z" }, + { 
url = "https://files.pythonhosted.org/packages/d1/2d/84509a2e32cb925371560ef5431365d8da2183c11d98e5b4b8b4e42426a5/fonttools-4.62.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b4e0fcf265ad26e487c56cb12a42dffe7162de708762db951e1b3f755319507d", size = 4911856, upload-time = "2026-03-13T13:52:22.777Z" }, + { url = "https://files.pythonhosted.org/packages/a5/80/df28131379eed93d9e6e6fccd3bf6e3d077bebbfe98cc83f21bbcd83ed02/fonttools-4.62.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2d850f66830a27b0d498ee05adb13a3781637b1826982cd7e2b3789ef0cc71ae", size = 5031712, upload-time = "2026-03-13T13:52:25.14Z" }, + { url = "https://files.pythonhosted.org/packages/3d/03/3c8f09aad64230cd6d921ae7a19f9603c36f70930b00459f112706f6769a/fonttools-4.62.1-cp310-cp310-win32.whl", hash = "sha256:486f32c8047ccd05652aba17e4a8819a3a9d78570eb8a0e3b4503142947880ed", size = 1507878, upload-time = "2026-03-13T13:52:28.149Z" }, + { url = "https://files.pythonhosted.org/packages/dd/ec/f53f626f8f3e89f4cadd8fc08f3452c8fd182c951ad5caa35efac22b29ab/fonttools-4.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:5a648bde915fba9da05ae98856987ca91ba832949a9e2888b48c47ef8b96c5a9", size = 1556766, upload-time = "2026-03-13T13:52:30.814Z" }, + { url = "https://files.pythonhosted.org/packages/88/39/23ff32561ec8d45a4d48578b4d241369d9270dc50926c017570e60893701/fonttools-4.62.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:40975849bac44fb0b9253d77420c6d8b523ac4dcdcefeff6e4d706838a5b80f7", size = 2871039, upload-time = "2026-03-13T13:52:33.127Z" }, + { url = "https://files.pythonhosted.org/packages/24/7f/66d3f8a9338a9b67fe6e1739f47e1cd5cee78bd3bc1206ef9b0b982289a5/fonttools-4.62.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9dde91633f77fa576879a0c76b1d89de373cae751a98ddf0109d54e173b40f14", size = 2416346, upload-time = "2026-03-13T13:52:35.676Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/53/5276ceba7bff95da7793a07c5284e1da901cf00341ce5e2f3273056c0cca/fonttools-4.62.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6acb4109f8bee00fec985c8c7afb02299e35e9c94b57287f3ea542f28bd0b0a7", size = 5100897, upload-time = "2026-03-13T13:52:38.102Z" }, + { url = "https://files.pythonhosted.org/packages/cc/a1/40a5c4d8e28b0851d53a8eeeb46fbd73c325a2a9a165f290a5ed90e6c597/fonttools-4.62.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1c5c25671ce8805e0d080e2ffdeca7f1e86778c5cbfbeae86d7f866d8830517b", size = 5071078, upload-time = "2026-03-13T13:52:41.305Z" }, + { url = "https://files.pythonhosted.org/packages/e3/be/d378fca4c65ea1956fee6d90ace6e861776809cbbc5af22388a090c3c092/fonttools-4.62.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a5d8825e1140f04e6c99bb7d37a9e31c172f3bc208afbe02175339e699c710e1", size = 5076908, upload-time = "2026-03-13T13:52:44.122Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d9/ae6a1d0693a4185a84605679c8a1f719a55df87b9c6e8e817bfdd9ef5936/fonttools-4.62.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:268abb1cb221e66c014acc234e872b7870d8b5d4657a83a8f4205094c32d2416", size = 5202275, upload-time = "2026-03-13T13:52:46.591Z" }, + { url = "https://files.pythonhosted.org/packages/54/6c/af95d9c4efb15cabff22642b608342f2bd67137eea6107202d91b5b03184/fonttools-4.62.1-cp311-cp311-win32.whl", hash = "sha256:942b03094d7edbb99bdf1ae7e9090898cad7bf9030b3d21f33d7072dbcb51a53", size = 2293075, upload-time = "2026-03-13T13:52:48.711Z" }, + { url = "https://files.pythonhosted.org/packages/d3/97/bf54c5b3f2be34e1f143e6db838dfdc54f2ffa3e68c738934c82f3b2a08d/fonttools-4.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:e8514f4924375f77084e81467e63238b095abda5107620f49421c368a6017ed2", size = 2344593, upload-time = "2026-03-13T13:52:50.725Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/d4/dbacced3953544b9a93088cc10ef2b596d348c983d5c67a404fa41ec51ba/fonttools-4.62.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:90365821debbd7db678809c7491ca4acd1e0779b9624cdc6ddaf1f31992bf974", size = 2870219, upload-time = "2026-03-13T13:52:53.664Z" }, + { url = "https://files.pythonhosted.org/packages/66/9e/a769c8e99b81e5a87ab7e5e7236684de4e96246aae17274e5347d11ebd78/fonttools-4.62.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12859ff0b47dd20f110804c3e0d0970f7b832f561630cd879969011541a464a9", size = 2414891, upload-time = "2026-03-13T13:52:56.493Z" }, + { url = "https://files.pythonhosted.org/packages/69/64/f19a9e3911968c37e1e620e14dfc5778299e1474f72f4e57c5ec771d9489/fonttools-4.62.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c125ffa00c3d9003cdaaf7f2c79e6e535628093e14b5de1dccb08859b680936", size = 5033197, upload-time = "2026-03-13T13:52:59.179Z" }, + { url = "https://files.pythonhosted.org/packages/9b/8a/99c8b3c3888c5c474c08dbfd7c8899786de9604b727fcefb055b42c84bba/fonttools-4.62.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:149f7d84afca659d1a97e39a4778794a2f83bf344c5ee5134e09995086cc2392", size = 4988768, upload-time = "2026-03-13T13:53:02.761Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c6/0f904540d3e6ab463c1243a0d803504826a11604c72dd58c2949796a1762/fonttools-4.62.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0aa72c43a601cfa9273bb1ae0518f1acadc01ee181a6fc60cd758d7fdadffc04", size = 4971512, upload-time = "2026-03-13T13:53:05.678Z" }, + { url = "https://files.pythonhosted.org/packages/29/0b/5cbef6588dc9bd6b5c9ad6a4d5a8ca384d0cea089da31711bbeb4f9654a6/fonttools-4.62.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:19177c8d96c7c36359266e571c5173bcee9157b59cfc8cb0153c5673dc5a3a7d", size = 5122723, upload-time = "2026-03-13T13:53:08.662Z" }, + { url = 
"https://files.pythonhosted.org/packages/4a/47/b3a5342d381595ef439adec67848bed561ab7fdb1019fa522e82101b7d9c/fonttools-4.62.1-cp312-cp312-win32.whl", hash = "sha256:a24decd24d60744ee8b4679d38e88b8303d86772053afc29b19d23bb8207803c", size = 2281278, upload-time = "2026-03-13T13:53:10.998Z" }, + { url = "https://files.pythonhosted.org/packages/28/b1/0c2ab56a16f409c6c8a68816e6af707827ad5d629634691ff60a52879792/fonttools-4.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:9e7863e10b3de72376280b515d35b14f5eeed639d1aa7824f4cf06779ec65e42", size = 2331414, upload-time = "2026-03-13T13:53:13.992Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ba/56147c165442cc5ba7e82ecf301c9a68353cede498185869e6e02b4c264f/fonttools-4.62.1-py3-none-any.whl", hash = "sha256:7487782e2113861f4ddcc07c3436450659e3caa5e470b27dc2177cade2d8e7fd", size = 1152647, upload-time = "2026-03-13T13:54:22.735Z" }, +] + +[[package]] +name = "frozenlist" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/4a/557715d5047da48d54e659203b9335be7bfaafda2c3f627b7c47e0b3aaf3/frozenlist-1.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b37f6d31b3dcea7deb5e9696e529a6aa4a898adc33db82da12e4c60a7c4d2011", size = 86230, upload-time = "2025-10-06T05:35:23.699Z" }, + { url = "https://files.pythonhosted.org/packages/a2/fb/c85f9fed3ea8fe8740e5b46a59cc141c23b842eca617da8876cfce5f760e/frozenlist-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef2b7b394f208233e471abc541cc6991f907ffd47dc72584acee3147899d6565", size = 49621, upload-time = "2025-10-06T05:35:25.341Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/70/26ca3f06aace16f2352796b08704338d74b6d1a24ca38f2771afbb7ed915/frozenlist-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a88f062f072d1589b7b46e951698950e7da00442fc1cacbe17e19e025dc327ad", size = 49889, upload-time = "2025-10-06T05:35:26.797Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ed/c7895fd2fde7f3ee70d248175f9b6cdf792fb741ab92dc59cd9ef3bd241b/frozenlist-1.8.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f57fb59d9f385710aa7060e89410aeb5058b99e62f4d16b08b91986b9a2140c2", size = 219464, upload-time = "2025-10-06T05:35:28.254Z" }, + { url = "https://files.pythonhosted.org/packages/6b/83/4d587dccbfca74cb8b810472392ad62bfa100bf8108c7223eb4c4fa2f7b3/frozenlist-1.8.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:799345ab092bee59f01a915620b5d014698547afd011e691a208637312db9186", size = 221649, upload-time = "2025-10-06T05:35:29.454Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c6/fd3b9cd046ec5fff9dab66831083bc2077006a874a2d3d9247dea93ddf7e/frozenlist-1.8.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c23c3ff005322a6e16f71bf8692fcf4d5a304aaafe1e262c98c6d4adc7be863e", size = 219188, upload-time = "2025-10-06T05:35:30.951Z" }, + { url = "https://files.pythonhosted.org/packages/ce/80/6693f55eb2e085fc8afb28cf611448fb5b90e98e068fa1d1b8d8e66e5c7d/frozenlist-1.8.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8a76ea0f0b9dfa06f254ee06053d93a600865b3274358ca48a352ce4f0798450", size = 231748, upload-time = "2025-10-06T05:35:32.101Z" }, + { url = "https://files.pythonhosted.org/packages/97/d6/e9459f7c5183854abd989ba384fe0cc1a0fb795a83c033f0571ec5933ca4/frozenlist-1.8.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:c7366fe1418a6133d5aa824ee53d406550110984de7637d65a178010f759c6ef", size = 236351, upload-time = "2025-10-06T05:35:33.834Z" }, + { url = "https://files.pythonhosted.org/packages/97/92/24e97474b65c0262e9ecd076e826bfd1d3074adcc165a256e42e7b8a7249/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13d23a45c4cebade99340c4165bd90eeb4a56c6d8a9d8aa49568cac19a6d0dc4", size = 218767, upload-time = "2025-10-06T05:35:35.205Z" }, + { url = "https://files.pythonhosted.org/packages/ee/bf/dc394a097508f15abff383c5108cb8ad880d1f64a725ed3b90d5c2fbf0bb/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:e4a3408834f65da56c83528fb52ce7911484f0d1eaf7b761fc66001db1646eff", size = 235887, upload-time = "2025-10-06T05:35:36.354Z" }, + { url = "https://files.pythonhosted.org/packages/40/90/25b201b9c015dbc999a5baf475a257010471a1fa8c200c843fd4abbee725/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:42145cd2748ca39f32801dad54aeea10039da6f86e303659db90db1c4b614c8c", size = 228785, upload-time = "2025-10-06T05:35:37.949Z" }, + { url = "https://files.pythonhosted.org/packages/84/f4/b5bc148df03082f05d2dd30c089e269acdbe251ac9a9cf4e727b2dbb8a3d/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e2de870d16a7a53901e41b64ffdf26f2fbb8917b3e6ebf398098d72c5b20bd7f", size = 230312, upload-time = "2025-10-06T05:35:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/db/4b/87e95b5d15097c302430e647136b7d7ab2398a702390cf4c8601975709e7/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:20e63c9493d33ee48536600d1a5c95eefc870cd71e7ab037763d1fbb89cc51e7", size = 217650, upload-time = "2025-10-06T05:35:40.377Z" }, + { url = "https://files.pythonhosted.org/packages/e5/70/78a0315d1fea97120591a83e0acd644da638c872f142fd72a6cebee825f3/frozenlist-1.8.0-cp310-cp310-win32.whl", hash = "sha256:adbeebaebae3526afc3c96fad434367cafbfd1b25d72369a9e5858453b1bb71a", size = 39659, upload-time = 
"2025-10-06T05:35:41.863Z" }, + { url = "https://files.pythonhosted.org/packages/66/aa/3f04523fb189a00e147e60c5b2205126118f216b0aa908035c45336e27e4/frozenlist-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:667c3777ca571e5dbeb76f331562ff98b957431df140b54c85fd4d52eea8d8f6", size = 43837, upload-time = "2025-10-06T05:35:43.205Z" }, + { url = "https://files.pythonhosted.org/packages/39/75/1135feecdd7c336938bd55b4dc3b0dfc46d85b9be12ef2628574b28de776/frozenlist-1.8.0-cp310-cp310-win_arm64.whl", hash = "sha256:80f85f0a7cc86e7a54c46d99c9e1318ff01f4687c172ede30fd52d19d1da1c8e", size = 39989, upload-time = "2025-10-06T05:35:44.596Z" }, + { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" }, + { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = "2025-10-06T05:35:48.38Z" }, + { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067, upload-time = "2025-10-06T05:35:49.97Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160, upload-time = "2025-10-06T05:35:51.729Z" }, + { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544, upload-time = "2025-10-06T05:35:53.246Z" }, + { url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797, upload-time = "2025-10-06T05:35:54.497Z" }, + { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923, upload-time = "2025-10-06T05:35:55.861Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886, upload-time = "2025-10-06T05:35:57.399Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = 
"sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731, upload-time = "2025-10-06T05:35:58.563Z" }, + { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = "2025-10-06T05:35:59.719Z" }, + { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" }, + { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" }, + { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = 
"2025-10-06T05:36:12.598Z" }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = 
"2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, ] [[package]] name = "fsspec" -version = "2025.5.1" +version = "2026.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/00/f7/27f15d41f0ed38e8fcc488584b57e902b331da7f7c6dcda53721b15838fc/fsspec-2025.5.1.tar.gz", hash = "sha256:2e55e47a540b91843b755e83ded97c6e897fa0942b11490113f09e9c443c2475", size = 303033, upload-time = "2025-05-24T12:03:23.792Z" } +resolution-markers = [ + 
"python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version == '3.11.*' 
and platform_machine == 's390x' and sys_platform == 'win32'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", +] +sdist = { url = "https://files.pythonhosted.org/packages/51/7c/f60c259dcbf4f0c47cc4ddb8f7720d2dcdc8888c8e5ad84c73ea4531cc5b/fsspec-2026.2.0.tar.gz", hash = "sha256:6544e34b16869f5aacd5b90bdf1a71acb37792ea3ddf6125ee69a22a53fb8bff", size = 313441, upload-time = "2026-02-05T21:50:53.743Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/61/78c7b3851add1481b048b5fdc29067397a1784e2910592bc81bb3f608635/fsspec-2025.5.1-py3-none-any.whl", hash = "sha256:24d3a2e663d5fc735ab256263c4075f374a174c3410c0b25e5bd1970bceaa462", size = 199052, upload-time = "2025-05-24T12:03:21.66Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl", hash = "sha256:98de475b5cb3bd66bedd5c4679e87b4fdfe1a3bf4d707b151b3c07e58c9a2437", size = 202505, upload-time = "2026-02-05T21:50:51.819Z" }, +] + +[package.optional-dependencies] +http = [ + { name = "aiohttp" }, +] + +[[package]] +name = "fsspec" +version = "2026.3.0" +source = { registry = 
"https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", +] +sdist = { url = 
"https://files.pythonhosted.org/packages/e1/cf/b50ddf667c15276a9ab15a70ef5f257564de271957933ffea49d2cdbcdfb/fsspec-2026.3.0.tar.gz", hash = "sha256:1ee6a0e28677557f8c2f994e3eea77db6392b4de9cd1f5d7a9e87a0ae9d01b41", size = 313547, upload-time = "2026-03-27T19:11:14.892Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/1f/5f4a3cd9e4440e9d9bc78ad0a91a1c8d46b4d429d5239ebe6793c9fe5c41/fsspec-2026.3.0-py3-none-any.whl", hash = "sha256:d2ceafaad1b3457968ed14efa28798162f1638dbb5d2a6868a2db002a5ee39a4", size = 202595, upload-time = "2026-03-27T19:11:13.595Z" }, ] [[package]] @@ -737,14 +2663,37 @@ wheels = [ [[package]] name = "griffe" -version = "1.15.0" +version = "2.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "griffecli" }, + { name = "griffelib" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4a/49/eb6d2935e27883af92c930ed40cc4c69bcd32c402be43b8ca4ab20510f67/griffe-2.0.2.tar.gz", hash = "sha256:c5d56326d159f274492e9bf93a9895cec101155d944caa66d0fc4e0c13751b92", size = 293757, upload-time = "2026-03-27T11:34:52.205Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/c0/2bb018eecf9a83c68db9cd9fffd9dab25f102ad30ed869451046e46d1187/griffe-2.0.2-py3-none-any.whl", hash = "sha256:2b31816460aee1996af26050a1fc6927a2e5936486856707f55508e4c9b5960b", size = 5141, upload-time = "2026-03-27T11:34:47.721Z" }, +] + +[[package]] +name = "griffecli" +version = "2.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama" }, + { name = "griffelib" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0d/0c/3a471b6e31951dce2360477420d0a8d1e00dea6cf33b70f3e8c3ab6e28e1/griffe-1.15.0.tar.gz", hash = "sha256:7726e3afd6f298fbc3696e67958803e7ac843c1cfe59734b6251a40cdbfb5eea", size = 424112, upload-time = "2025-11-10T15:03:15.52Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/79/e0/6a7d661d71bb043656a109b91d84a42b5342752542074ec83b16a6eb97f0/griffecli-2.0.2.tar.gz", hash = "sha256:40a1ad4181fc39685d025e119ae2c5b669acdc1f19b705fb9bf971f4e6f6dffb", size = 56281, upload-time = "2026-03-27T11:34:50.087Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/e8/90d93356c88ac34c20cb5edffca68138df55ca9bbd1a06eccfbcec8fdbe5/griffecli-2.0.2-py3-none-any.whl", hash = "sha256:0d44d39e59afa81e288a3e1c3bf352cc4fa537483326ac06b8bb6a51fd8303a0", size = 9500, upload-time = "2026-03-27T11:34:48.81Z" }, +] + +[[package]] +name = "griffelib" +version = "2.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/82/74f4a3310cdabfbb10da554c3a672847f1ed33c6f61dd472681ce7f1fe67/griffelib-2.0.2.tar.gz", hash = "sha256:3cf20b3bc470e83763ffbf236e0076b1211bac1bc67de13daf494640f2de707e", size = 166461, upload-time = "2026-03-27T11:34:51.091Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/83/3b1d03d36f224edded98e9affd0467630fc09d766c0e56fb1498cbb04a9b/griffe-1.15.0-py3-none-any.whl", hash = "sha256:6f6762661949411031f5fcda9593f586e6ce8340f0ba88921a0f2ef7a81eb9a3", size = 150705, upload-time = "2025-11-10T15:03:13.549Z" }, + { url = "https://files.pythonhosted.org/packages/11/8c/c9138d881c79aa0ea9ed83cbd58d5ca75624378b38cee225dcf5c42cc91f/griffelib-2.0.2-py3-none-any.whl", hash = "sha256:925c857658fb1ba40c0772c37acbc2ab650bd794d9c1b9726922e36ea4117ea1", size = 142357, upload-time = "2026-03-27T11:34:46.275Z" }, ] [[package]] @@ -758,44 +2707,75 @@ wheels = [ [[package]] name = "h5py" -version = "3.14.0" +version = "3.16.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/5d/57/dfb3c5c3f1bf5f5ef2e59a22dec4ff1f3d7408b55bfcefcfb0ea69ef21c6/h5py-3.14.0.tar.gz", hash = 
"sha256:2372116b2e0d5d3e5e705b7f663f7c8d96fa79a4052d250484ef91d24d6a08f4", size = 424323, upload-time = "2025-06-06T14:06:15.01Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/52/89/06cbb421e01dea2e338b3154326523c05d9698f89a01f9d9b65e1ec3fb18/h5py-3.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:24df6b2622f426857bda88683b16630014588a0e4155cba44e872eb011c4eaed", size = 3332522, upload-time = "2025-06-06T14:04:13.775Z" }, - { url = "https://files.pythonhosted.org/packages/c3/e7/6c860b002329e408348735bfd0459e7b12f712c83d357abeef3ef404eaa9/h5py-3.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ff2389961ee5872de697054dd5a033b04284afc3fb52dc51d94561ece2c10c6", size = 2831051, upload-time = "2025-06-06T14:04:18.206Z" }, - { url = "https://files.pythonhosted.org/packages/fa/cd/3dd38cdb7cc9266dc4d85f27f0261680cb62f553f1523167ad7454e32b11/h5py-3.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:016e89d3be4c44f8d5e115fab60548e518ecd9efe9fa5c5324505a90773e6f03", size = 4324677, upload-time = "2025-06-06T14:04:23.438Z" }, - { url = "https://files.pythonhosted.org/packages/b1/45/e1a754dc7cd465ba35e438e28557119221ac89b20aaebef48282654e3dc7/h5py-3.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1223b902ef0b5d90bcc8a4778218d6d6cd0f5561861611eda59fa6c52b922f4d", size = 4557272, upload-time = "2025-06-06T14:04:28.863Z" }, - { url = "https://files.pythonhosted.org/packages/5c/06/f9506c1531645829d302c420851b78bb717af808dde11212c113585fae42/h5py-3.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:852b81f71df4bb9e27d407b43071d1da330d6a7094a588efa50ef02553fa7ce4", size = 2866734, upload-time = "2025-06-06T14:04:33.5Z" }, - { url = "https://files.pythonhosted.org/packages/61/1b/ad24a8ce846cf0519695c10491e99969d9d203b9632c4fcd5004b1641c2e/h5py-3.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f30dbc58f2a0efeec6c8836c97f6c94afd769023f44e2bb0ed7b17a16ec46088", size = 3352382, 
upload-time = "2025-06-06T14:04:37.95Z" }, - { url = "https://files.pythonhosted.org/packages/36/5b/a066e459ca48b47cc73a5c668e9924d9619da9e3c500d9fb9c29c03858ec/h5py-3.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:543877d7f3d8f8a9828ed5df6a0b78ca3d8846244b9702e99ed0d53610b583a8", size = 2852492, upload-time = "2025-06-06T14:04:42.092Z" }, - { url = "https://files.pythonhosted.org/packages/08/0c/5e6aaf221557314bc15ba0e0da92e40b24af97ab162076c8ae009320a42b/h5py-3.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c497600c0496548810047257e36360ff551df8b59156d3a4181072eed47d8ad", size = 4298002, upload-time = "2025-06-06T14:04:47.106Z" }, - { url = "https://files.pythonhosted.org/packages/21/d4/d461649cafd5137088fb7f8e78fdc6621bb0c4ff2c090a389f68e8edc136/h5py-3.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:723a40ee6505bd354bfd26385f2dae7bbfa87655f4e61bab175a49d72ebfc06b", size = 4516618, upload-time = "2025-06-06T14:04:52.467Z" }, - { url = "https://files.pythonhosted.org/packages/db/0c/6c3f879a0f8e891625817637fad902da6e764e36919ed091dc77529004ac/h5py-3.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:d2744b520440a996f2dae97f901caa8a953afc055db4673a993f2d87d7f38713", size = 2874888, upload-time = "2025-06-06T14:04:56.95Z" }, - { url = "https://files.pythonhosted.org/packages/3e/77/8f651053c1843391e38a189ccf50df7e261ef8cd8bfd8baba0cbe694f7c3/h5py-3.14.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e0045115d83272090b0717c555a31398c2c089b87d212ceba800d3dc5d952e23", size = 3312740, upload-time = "2025-06-06T14:05:01.193Z" }, - { url = "https://files.pythonhosted.org/packages/ff/10/20436a6cf419b31124e59fefc78d74cb061ccb22213226a583928a65d715/h5py-3.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6da62509b7e1d71a7d110478aa25d245dd32c8d9a1daee9d2a42dba8717b047a", size = 2829207, upload-time = "2025-06-06T14:05:05.061Z" }, - { url = 
"https://files.pythonhosted.org/packages/3f/19/c8bfe8543bfdd7ccfafd46d8cfd96fce53d6c33e9c7921f375530ee1d39a/h5py-3.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554ef0ced3571366d4d383427c00c966c360e178b5fb5ee5bb31a435c424db0c", size = 4708455, upload-time = "2025-06-06T14:05:11.528Z" }, - { url = "https://files.pythonhosted.org/packages/86/f9/f00de11c82c88bfc1ef22633557bfba9e271e0cb3189ad704183fc4a2644/h5py-3.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cbd41f4e3761f150aa5b662df991868ca533872c95467216f2bec5fcad84882", size = 4929422, upload-time = "2025-06-06T14:05:18.399Z" }, - { url = "https://files.pythonhosted.org/packages/7a/6d/6426d5d456f593c94b96fa942a9b3988ce4d65ebaf57d7273e452a7222e8/h5py-3.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:bf4897d67e613ecf5bdfbdab39a1158a64df105827da70ea1d90243d796d367f", size = 2862845, upload-time = "2025-06-06T14:05:23.699Z" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' 
and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or 
(python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra 
== 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/db/33/acd0ce6863b6c0d7735007df01815403f5589a21ff8c2e1ee2587a38f548/h5py-3.16.0.tar.gz", hash = "sha256:a0dbaad796840ccaa67a4c144a0d0c8080073c34c76d5a6941d6818678ef2738", size = 446526, upload-time = "2026-03-06T13:49:08.07Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/6b/231413e58a787a89b316bb0d1777da3c62257e4797e09afd8d17ad3549dc/h5py-3.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e06f864bedb2c8e7c1358e6c73af48519e317457c444d6f3d332bb4e8fa6d7d9", size = 3724137, upload-time = "2026-03-06T13:47:35.242Z" }, + { url = "https://files.pythonhosted.org/packages/74/f9/557ce3aad0fe8471fb5279bab0fc56ea473858a022c4ce8a0b8f303d64e9/h5py-3.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ec86d4fffd87a0f4cb3d5796ceb5a50123a2a6d99b43e616e5504e66a953eca3", size = 3090112, upload-time = "2026-03-06T13:47:37.634Z" }, + { url = "https://files.pythonhosted.org/packages/7a/f5/e15b3d0dc8a18e56409a839e6468d6fb589bc5207c917399c2e0706eeb44/h5py-3.16.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:86385ea895508220b8a7e45efa428aeafaa586bd737c7af9ee04661d8d84a10d", size = 4844847, upload-time = "2026-03-06T13:47:39.811Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/92/a8851d936547efe30cc0ce5245feac01f3ec6171f7899bc3f775c72030b3/h5py-3.16.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:8975273c2c5921c25700193b408e28d6bdd0111c37468b2d4e25dcec4cd1d84d", size = 5065352, upload-time = "2026-03-06T13:47:41.489Z" }, + { url = "https://files.pythonhosted.org/packages/2b/ae/f2adc5d0ca9626db3277a3d87516e124cbc5d0eea0bd79bc085702d04f2c/h5py-3.16.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1677ad48b703f44efc9ea0c3ab284527f81bc4f318386aaaebc5fede6bbae56f", size = 4839173, upload-time = "2026-03-06T13:47:43.586Z" }, + { url = "https://files.pythonhosted.org/packages/64/0b/e0c8c69da1d8838da023a50cd3080eae5d475691f7636b35eff20bb6ef20/h5py-3.16.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7c4dd4cf5f0a4e36083f73172f6cfc25a5710789269547f132a20975bfe2434c", size = 5076216, upload-time = "2026-03-06T13:47:45.315Z" }, + { url = "https://files.pythonhosted.org/packages/66/35/d88fd6718832133c885004c61ceeeb24dbd6397ef877dbed6b3a64d6a286/h5py-3.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:bdef06507725b455fccba9c16529121a5e1fbf56aa375f7d9713d9e8ff42454d", size = 3183639, upload-time = "2026-03-06T13:47:47.041Z" }, + { url = "https://files.pythonhosted.org/packages/ba/95/a825894f3e45cbac7554c4e97314ce886b233a20033787eda755ca8fecc7/h5py-3.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:719439d14b83f74eeb080e9650a6c7aa6d0d9ea0ca7f804347b05fac6fbf18af", size = 3721663, upload-time = "2026-03-06T13:47:49.599Z" }, + { url = "https://files.pythonhosted.org/packages/bf/3b/38ff88b347c3e346cda1d3fc1b65a7aa75d40632228d8b8a5d7b58508c24/h5py-3.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c3f0a0e136f2e95dd0b67146abb6668af4f1a69c81ef8651a2d316e8e01de447", size = 3087630, upload-time = "2026-03-06T13:47:51.249Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/a8/2594cef906aee761601eff842c7dc598bea2b394a3e1c00966832b8eeb7c/h5py-3.16.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:a6fbc5367d4046801f9b7db9191b31895f22f1c6df1f9987d667854cac493538", size = 4823472, upload-time = "2026-03-06T13:47:53.085Z" }, + { url = "https://files.pythonhosted.org/packages/52/a0/c1f604538ff6db22a0690be2dc44ab59178e115f63c917794e529356ab23/h5py-3.16.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:fb1720028d99040792bb2fb31facb8da44a6f29df7697e0b84f0d79aff2e9bd3", size = 5027150, upload-time = "2026-03-06T13:47:55.043Z" }, + { url = "https://files.pythonhosted.org/packages/2e/fd/301739083c2fc4fd89950f9bcfce75d6e14b40b0ca3d40e48a8993d1722c/h5py-3.16.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:314b6054fe0b1051c2b0cb2df5cbdab15622fb05e80f202e3b6a5eee0d6fe365", size = 4814544, upload-time = "2026-03-06T13:47:56.893Z" }, + { url = "https://files.pythonhosted.org/packages/4c/42/2193ed41ccee78baba8fcc0cff2c925b8b9ee3793305b23e1f22c20bf4c7/h5py-3.16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ffbab2fedd6581f6aa31cf1639ca2cb86e02779de525667892ebf4cc9fd26434", size = 5034013, upload-time = "2026-03-06T13:47:59.01Z" }, + { url = "https://files.pythonhosted.org/packages/f7/20/e6c0ff62ca2ad1a396a34f4380bafccaaf8791ff8fccf3d995a1fc12d417/h5py-3.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:17d1f1630f92ad74494a9a7392ab25982ce2b469fc62da6074c0ce48366a2999", size = 3191673, upload-time = "2026-03-06T13:48:00.626Z" }, + { url = "https://files.pythonhosted.org/packages/f2/48/239cbe352ac4f2b8243a8e620fa1a2034635f633731493a7ff1ed71e8658/h5py-3.16.0-cp311-cp311-win_arm64.whl", hash = "sha256:85b9c49dd58dc44cf70af944784e2c2038b6f799665d0dcbbc812a26e0faa859", size = 2673834, upload-time = "2026-03-06T13:48:02.579Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/c0/5d4119dba94093bbafede500d3defd2f5eab7897732998c04b54021e530b/h5py-3.16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c5313566f4643121a78503a473f0fb1e6dcc541d5115c44f05e037609c565c4d", size = 3685604, upload-time = "2026-03-06T13:48:04.198Z" }, + { url = "https://files.pythonhosted.org/packages/b0/42/c84efcc1d4caebafb1ecd8be4643f39c85c47a80fe254d92b8b43b1eadaf/h5py-3.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:42b012933a83e1a558c673176676a10ce2fd3759976a0fedee1e672d1e04fc9d", size = 3061940, upload-time = "2026-03-06T13:48:05.783Z" }, + { url = "https://files.pythonhosted.org/packages/89/84/06281c82d4d1686fde1ac6b0f307c50918f1c0151062445ab3b6fa5a921d/h5py-3.16.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:ff24039e2573297787c3063df64b60aab0591980ac898329a08b0320e0cf2527", size = 5198852, upload-time = "2026-03-06T13:48:07.482Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e9/1a19e42cd43cc1365e127db6aae85e1c671da1d9a5d746f4d34a50edb577/h5py-3.16.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:dfc21898ff025f1e8e67e194965a95a8d4754f452f83454538f98f8a3fcb207e", size = 5405250, upload-time = "2026-03-06T13:48:09.628Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8e/9790c1655eabeb85b92b1ecab7d7e62a2069e53baefd58c98f0909c7a948/h5py-3.16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:698dd69291272642ffda44a0ecd6cd3bda5faf9621452d255f57ce91487b9794", size = 5190108, upload-time = "2026-03-06T13:48:11.26Z" }, + { url = "https://files.pythonhosted.org/packages/51/d7/ab693274f1bd7e8c5f9fdd6c7003a88d59bedeaf8752716a55f532924fbb/h5py-3.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2b2c02b0a160faed5fb33f1ba8a264a37ee240b22e049ecc827345d0d9043074", size = 5419216, upload-time = "2026-03-06T13:48:13.322Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/c1/0976b235cf29ead553e22f2fb6385a8252b533715e00d0ae52ed7b900582/h5py-3.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:96b422019a1c8975c2d5dadcf61d4ba6f01c31f92bbde6e4649607885fe502d6", size = 3182868, upload-time = "2026-03-06T13:48:15.759Z" }, + { url = "https://files.pythonhosted.org/packages/14/d9/866b7e570b39070f92d47b0ff1800f0f8239b6f9e45f02363d7112336c1f/h5py-3.16.0-cp312-cp312-win_arm64.whl", hash = "sha256:39c2838fb1e8d97bcf1755e60ad1f3dd76a7b2a475928dc321672752678b96db", size = 2653286, upload-time = "2026-03-06T13:48:17.279Z" }, +] + +[[package]] +name = "hf-transfer" +version = "0.1.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1a/eb/8fc64f40388c29ce8ce3b2b180a089d4d6b25b1d0d232d016704cb852104/hf_transfer-0.1.9.tar.gz", hash = "sha256:035572865dab29d17e783fbf1e84cf1cb24f3fcf8f1b17db1cfc7fdf139f02bf", size = 25201, upload-time = "2025-01-07T10:05:12.947Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/f5/461d2e5f307e5048289b1168d5c642ae3bb2504e88dff1a38b92ed990a21/hf_transfer-0.1.9-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:e66acf91df4a8b72f60223059df3003062a5ae111757187ed1a06750a30e911b", size = 1393046, upload-time = "2025-01-07T10:04:51.003Z" }, + { url = "https://files.pythonhosted.org/packages/41/ba/8d9fd9f1083525edfcb389c93738c802f3559cb749324090d7109c8bf4c2/hf_transfer-0.1.9-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:8669dbcc7a3e2e8d61d42cd24da9c50d57770bd74b445c65123291ca842a7e7a", size = 1348126, upload-time = "2025-01-07T10:04:45.712Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a2/cd7885bc9959421065a6fae0fe67b6c55becdeda4e69b873e52976f9a9f0/hf_transfer-0.1.9-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fd0167c4407a3bc4cdd0307e65ada2294ec04f1813d8a69a5243e379b22e9d8", size = 3728604, upload-time = "2025-01-07T10:04:14.173Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/2e/a072cf196edfeda3310c9a5ade0a0fdd785e6154b3ce24fc738c818da2a7/hf_transfer-0.1.9-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ee8b10afedcb75f71091bcc197c526a6ebf5c58bbbadb34fdeee6160f55f619f", size = 3064995, upload-time = "2025-01-07T10:04:18.663Z" }, + { url = "https://files.pythonhosted.org/packages/c2/84/aec9ef4c0fab93c1ea2b1badff38c78b4b2f86f0555b26d2051dbc920cde/hf_transfer-0.1.9-cp38-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5828057e313de59300dd1abb489444bc452efe3f479d3c55b31a8f680936ba42", size = 3580908, upload-time = "2025-01-07T10:04:32.834Z" }, + { url = "https://files.pythonhosted.org/packages/29/63/b560d39651a56603d64f1a0212d0472a44cbd965db2fa62b99d99cb981bf/hf_transfer-0.1.9-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc6bd19e1cc177c66bdef15ef8636ad3bde79d5a4f608c158021153b4573509d", size = 3400839, upload-time = "2025-01-07T10:04:26.122Z" }, + { url = "https://files.pythonhosted.org/packages/d6/d8/f87ea6f42456254b48915970ed98e993110521e9263472840174d32c880d/hf_transfer-0.1.9-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdca9bfb89e6f8f281890cc61a8aff2d3cecaff7e1a4d275574d96ca70098557", size = 3552664, upload-time = "2025-01-07T10:04:40.123Z" }, + { url = "https://files.pythonhosted.org/packages/d6/56/1267c39b65fc8f4e2113b36297320f102718bf5799b544a6cbe22013aa1d/hf_transfer-0.1.9-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:89a23f58b7b7effbc047b8ca286f131b17728c99a9f972723323003ffd1bb916", size = 4073732, upload-time = "2025-01-07T10:04:55.624Z" }, + { url = "https://files.pythonhosted.org/packages/82/1a/9c748befbe3decf7cb415e34f8a0c3789a0a9c55910dea73d581e48c0ce5/hf_transfer-0.1.9-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:dc7fff1345980d6c0ebb92c811d24afa4b98b3e07ed070c8e38cc91fd80478c5", size = 3390096, upload-time = "2025-01-07T10:04:59.98Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/85/4c03da147b6b4b7cb12e074d3d44eee28604a387ed0eaf7eaaead5069c57/hf_transfer-0.1.9-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:1a6bd16c667ebe89a069ca163060127a794fa3a3525292c900b8c8cc47985b0d", size = 3664743, upload-time = "2025-01-07T10:05:05.416Z" }, + { url = "https://files.pythonhosted.org/packages/e7/6e/e597b04f753f1b09e6893075d53a82a30c13855cbaa791402695b01e369f/hf_transfer-0.1.9-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d2fde99d502093ade3ab1b53f80da18480e9902aa960dab7f74fb1b9e5bc5746", size = 3695243, upload-time = "2025-01-07T10:05:11.411Z" }, + { url = "https://files.pythonhosted.org/packages/09/89/d4e234727a26b2546c8fb70a276cd924260d60135f2165bf8b9ed67bb9a4/hf_transfer-0.1.9-cp38-abi3-win32.whl", hash = "sha256:435cc3cdc8524ce57b074032b8fd76eed70a4224d2091232fa6a8cef8fd6803e", size = 1086605, upload-time = "2025-01-07T10:05:18.873Z" }, + { url = "https://files.pythonhosted.org/packages/a1/14/f1e15b851d1c2af5b0b1a82bf8eb10bda2da62d98180220ba6fd8879bb5b/hf_transfer-0.1.9-cp38-abi3-win_amd64.whl", hash = "sha256:16f208fc678911c37e11aa7b586bc66a37d02e636208f18b6bc53d29b5df40ad", size = 1160240, upload-time = "2025-01-07T10:05:14.324Z" }, ] [[package]] name = "hf-xet" -version = "1.3.2" +version = "1.4.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8b/cb/9bb543bd987ffa1ee48202cc96a756951b734b79a542335c566148ade36c/hf_xet-1.3.2.tar.gz", hash = "sha256:e130ee08984783d12717444e538587fa2119385e5bd8fc2bb9f930419b73a7af", size = 643646, upload-time = "2026-02-27T17:26:08.051Z" } +sdist = { url = "https://files.pythonhosted.org/packages/53/92/ec9ad04d0b5728dca387a45af7bc98fbb0d73b2118759f5f6038b61a57e8/hf_xet-1.4.3.tar.gz", hash = "sha256:8ddedb73c8c08928c793df2f3401ec26f95be7f7e516a7bee2fbb546f6676113", size = 670477, upload-time = "2026-03-31T22:40:07.874Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/d8/28/dbb024e2e3907f6f3052847ca7d1a2f7a3972fafcd53ff79018977fcb3e4/hf_xet-1.3.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:f93b7595f1d8fefddfede775c18b5c9256757824f7f6832930b49858483cd56f", size = 3763961, upload-time = "2026-02-27T17:25:52.537Z" }, - { url = "https://files.pythonhosted.org/packages/e4/71/b99aed3823c9d1795e4865cf437d651097356a3f38c7d5877e4ac544b8e4/hf_xet-1.3.2-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:a85d3d43743174393afe27835bde0cd146e652b5fcfdbcd624602daef2ef3259", size = 3526171, upload-time = "2026-02-27T17:25:50.968Z" }, - { url = "https://files.pythonhosted.org/packages/9d/ca/907890ce6ef5598b5920514f255ed0a65f558f820515b18db75a51b2f878/hf_xet-1.3.2-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7c2a054a97c44e136b1f7f5a78f12b3efffdf2eed3abc6746fc5ea4b39511633", size = 4180750, upload-time = "2026-02-27T17:25:43.125Z" }, - { url = "https://files.pythonhosted.org/packages/8c/ad/bc7f41f87173d51d0bce497b171c4ee0cbde1eed2d7b4216db5d0ada9f50/hf_xet-1.3.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:06b724a361f670ae557836e57801b82c75b534812e351a87a2c739f77d1e0635", size = 3961035, upload-time = "2026-02-27T17:25:41.837Z" }, - { url = "https://files.pythonhosted.org/packages/73/38/600f4dda40c4a33133404d9fe644f1d35ff2d9babb4d0435c646c63dd107/hf_xet-1.3.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:305f5489d7241a47e0458ef49334be02411d1d0f480846363c1c8084ed9916f7", size = 4161378, upload-time = "2026-02-27T17:26:00.365Z" }, - { url = "https://files.pythonhosted.org/packages/00/b3/7bc1ff91d1ac18420b7ad1e169b618b27c00001b96310a89f8a9294fe509/hf_xet-1.3.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:06cdbde243c85f39a63b28e9034321399c507bcd5e7befdd17ed2ccc06dfe14e", size = 4398020, upload-time = "2026-02-27T17:26:03.977Z" }, - { url = 
"https://files.pythonhosted.org/packages/2b/0b/99bfd948a3ed3620ab709276df3ad3710dcea61976918cce8706502927af/hf_xet-1.3.2-cp37-abi3-win_amd64.whl", hash = "sha256:9298b47cce6037b7045ae41482e703c471ce36b52e73e49f71226d2e8e5685a1", size = 3641624, upload-time = "2026-02-27T17:26:13.542Z" }, - { url = "https://files.pythonhosted.org/packages/cc/02/9a6e4ca1f3f73a164c0cd48e41b3cc56585dcc37e809250de443d673266f/hf_xet-1.3.2-cp37-abi3-win_arm64.whl", hash = "sha256:83d8ec273136171431833a6957e8f3af496bee227a0fe47c7b8b39c106d1749a", size = 3503976, upload-time = "2026-02-27T17:26:12.123Z" }, + { url = "https://files.pythonhosted.org/packages/ac/9f/9c23e4a447b8f83120798f9279d0297a4d1360bdbf59ef49ebec78fe2545/hf_xet-1.4.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:d0da85329eaf196e03e90b84c2d0aca53bd4573d097a75f99609e80775f98025", size = 3805048, upload-time = "2026-03-31T22:39:53.105Z" }, + { url = "https://files.pythonhosted.org/packages/0b/f8/7aacb8e5f4a7899d39c787b5984e912e6c18b11be136ef13947d7a66d265/hf_xet-1.4.3-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:e23717ce4186b265f69afa66e6f0069fe7efbf331546f5c313d00e123dc84583", size = 3562178, upload-time = "2026-03-31T22:39:51.295Z" }, + { url = "https://files.pythonhosted.org/packages/df/9a/a24b26dc8a65f0ecc0fe5be981a19e61e7ca963b85e062c083f3a9100529/hf_xet-1.4.3-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc360b70c815bf340ed56c7b8c63aacf11762a4b099b2fe2c9bd6d6068668c08", size = 4212320, upload-time = "2026-03-31T22:39:42.922Z" }, + { url = "https://files.pythonhosted.org/packages/53/60/46d493db155d2ee2801b71fb1b0fd67696359047fdd8caee2c914cc50c79/hf_xet-1.4.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:39f2d2e9654cd9b4319885733993807aab6de9dfbd34c42f0b78338d6617421f", size = 3991546, upload-time = "2026-03-31T22:39:41.335Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/f5/067363e1c96c6b17256910830d1b54099d06287e10f4ec6ec4e7e08371fc/hf_xet-1.4.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:49ad8a8cead2b56051aa84d7fce3e1335efe68df3cf6c058f22a65513885baac", size = 4193200, upload-time = "2026-03-31T22:40:01.936Z" }, + { url = "https://files.pythonhosted.org/packages/42/4b/53951592882d9c23080c7644542fda34a3813104e9e11fa1a7d82d419cb8/hf_xet-1.4.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7716d62015477a70ea272d2d68cd7cad140f61c52ee452e133e139abfe2c17ba", size = 4429392, upload-time = "2026-03-31T22:40:03.492Z" }, + { url = "https://files.pythonhosted.org/packages/8a/21/75a6c175b4e79662ad8e62f46a40ce341d8d6b206b06b4320d07d55b188c/hf_xet-1.4.3-cp37-abi3-win_amd64.whl", hash = "sha256:6b591fcad34e272a5b02607485e4f2a1334aebf1bc6d16ce8eb1eb8978ac2021", size = 3677359, upload-time = "2026-03-31T22:40:13.619Z" }, + { url = "https://files.pythonhosted.org/packages/8a/7c/44314ecd0e89f8b2b51c9d9e5e7a60a9c1c82024ac471d415860557d3cd8/hf_xet-1.4.3-cp37-abi3-win_arm64.whl", hash = "sha256:7c2c7e20bcfcc946dc67187c203463f5e932e395845d098cc2a93f5b67ca0b47", size = 3533664, upload-time = "2026-03-31T22:40:12.152Z" }, ] [[package]] @@ -828,12 +2808,13 @@ wheels = [ [[package]] name = "huggingface-hub" -version = "1.5.0" +version = "1.9.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, - { name = "fsspec" }, - { name = "hf-xet", marker = "platform_machine == 'AMD64' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "fsspec", version = "2026.2.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-falcon-perception' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "fsspec", version = "2026.3.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or extra == 'extra-16-inference-models-torch-cpu' or extra == 'extra-16-inference-models-torch-cu118' or extra == 'extra-16-inference-models-torch-cu124' or extra == 'extra-16-inference-models-torch-cu126' or extra == 
'extra-16-inference-models-torch-jp6-cu126' or extra != 'extra-16-inference-models-falcon-perception' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130')" }, + { name = "hf-xet", marker = "platform_machine == 'AMD64' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "httpx" }, { name = "packaging" }, { name = "pyyaml" }, @@ -841,9 +2822,9 @@ dependencies = [ { name = "typer" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ae/76/b5efb3033d8499b17f9386beaf60f64c461798e1ee16d10bc9c0077beba5/huggingface_hub-1.5.0.tar.gz", hash = "sha256:f281838db29265880fb543de7a23b0f81d3504675de82044307ea3c6c62f799d", size = 695872, upload-time = "2026-02-26T15:35:32.745Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/65/fb800d327bf25bf31b798dd08935d326d064ecb9b359059fecd91b3a98e8/huggingface_hub-1.9.2.tar.gz", hash = "sha256:8d09d080a186bd950a361bfc04b862dfb04d6a2b41d48e9ba1b37507cfd3f1e1", size = 750284, upload-time = "2026-04-08T08:43:11.127Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/74/2bc951622e2dbba1af9a460d93c51d15e458becd486e62c29cc0ccb08178/huggingface_hub-1.5.0-py3-none-any.whl", hash = "sha256:c9c0b3ab95a777fc91666111f3b3ede71c0cdced3614c553a64e98920585c4ee", size = 596261, upload-time = "2026-02-26T15:35:31.1Z" }, + { url = "https://files.pythonhosted.org/packages/57/d4/e33bf0b362810a9b96c5923e38908950d58ecb512db42e3730320c7f4a3a/huggingface_hub-1.9.2-py3-none-any.whl", hash = "sha256:e1e62ce237d4fbeca9f970aeb15176fbd503e04c25577bfd22f44aa7aa2b5243", size = 637349, upload-time = "2026-04-08T08:43:09.114Z" }, ] [[package]] @@ -851,7 +2832,7 @@ name = "humanfriendly" version = "10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pyreadline3", marker = "(sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu') or (sys_platform == 'win32' and extra == 
'extra-16-inference-models-onnx-cu118') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pyreadline3", marker = "(sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 
'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = 
"sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702, upload-time = "2021-09-17T21:40:43.31Z" } wheels = [ @@ -874,45 +2855,26 @@ wheels = [ [[package]] name = "idna" -version = "3.10" +version = "3.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] [[package]] name = "imageio" -version = "2.37.0" +version = "2.37.3" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 
'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and 
extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "pillow" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0c/47/57e897fb7094afb2d26e8b2e4af9a45c7cf1a405acdeeca001fdf2c98501/imageio-2.37.0.tar.gz", hash = "sha256:71b57b3669666272c818497aebba2b4c5f20d5b37c81720e5e1a56d59c492996", size = 389963, upload-time = "2025-01-20T02:42:37.089Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/bd/b394387b598ed84d8d0fa90611a90bee0adc2021820ad5729f7ced74a8e2/imageio-2.37.0-py3-none-any.whl", hash = "sha256:11efa15b87bc7871b61590326b2d635439acc321cf7f8ce996f812543ce10eed", size = 315796, upload-time = "2025-01-20T02:42:34.931Z" }, -] - -[[package]] -name = "importlib-metadata" -version = 
"8.7.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "zipp" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, -] - -[[package]] -name = "importlib-resources" -version = "6.5.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cf/8c/f834fbf984f691b4f7ff60f50b514cc3de5cc08abfc3295564dd89c5e2e7/importlib_resources-6.5.2.tar.gz", hash = "sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c", size = 44693, upload-time = "2025-01-03T18:51:56.698Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/84/93bcd1300216ea50811cee96873b84a1bebf8d0489ffaf7f2a3756bab866/imageio-2.37.3.tar.gz", hash = "sha256:bbb37efbfc4c400fcd534b367b91fcd66d5da639aaa138034431a1c5e0a41451", size = 389673, upload-time = "2026-03-09T11:31:12.573Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/ed/1f1afb2e9e7f38a545d628f864d562a5ae64fe6f7a10e28ffb9b185b4e89/importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec", size = 37461, upload-time = "2025-01-03T18:51:54.306Z" }, + { url = "https://files.pythonhosted.org/packages/49/fa/391e437a34e55095173dca5f24070d89cbc233ff85bf1c29c93248c6588d/imageio-2.37.3-py3-none-any.whl", hash = "sha256:46f5bb8522cd421c0f5ae104d8268f569d856b29eb1a13b92829d1970f32c9f0", size = 317646, upload-time = 
"2026-03-09T11:31:10.771Z" }, ] [[package]] @@ -922,13 +2884,16 @@ source = { virtual = "." } dependencies = [ { name = "accelerate" }, { name = "backoff" }, - { name = "bitsandbytes", marker = "sys_platform != 'darwin' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "bitsandbytes", marker = "sys_platform != 'darwin' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "easyocr" }, { name = "einops" }, { name = "filelock" }, { name = "num2words" }, - { name = "numpy" }, - { name = "opencv-python" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or 
(python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra 
== 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "opencv-python", version = "4.11.0.86", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or 
(extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "opencv-python", version = "4.13.0.92", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-falcon-perception' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "packaging" }, { name = "peft" }, { name = "pybase64" }, @@ -947,21 +2912,20 @@ dependencies = [ { name = "supervision" }, { name = "timm" }, { name = "tldextract" }, - { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.21.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') 
or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or 
(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra 
== 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or 
(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or 
(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.23.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.23.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') 
or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra 
!= 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') 
or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cpu", source = { registry 
= "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.21.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.22.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { 
name = "torchvision", version = "0.26.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') 
or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') 
or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and 
extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' 
and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "transformers" }, ] @@ -977,6 +2941,9 @@ docs = [ { name = "mkdocstrings" }, { name = "mkdocstrings-python" }, ] +falcon-perception = [ + { name = "falcon-perception" }, +] mediapipe = [ { name = "rf-mediapipe" }, ] @@ -984,15 +2951,15 @@ onnx-cpu = [ { name = "onnxruntime" }, ] onnx-cu118 = [ - { name = "onnxruntime-gpu", version = "1.20.1", source = { registry = "https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/onnxruntime-cuda-11/pypi/simple/" }, marker = "platform_system != 'darwin' or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "pycuda", marker = "platform_system != 'darwin' or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or 
(extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra 
== 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "onnxruntime-gpu", version = "1.20.1", source = { registry = "https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/onnxruntime-cuda-11/pypi/simple/" }, marker = "platform_system != 'darwin' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pycuda", marker = "platform_system != 'darwin' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126')" }, ] onnx-cu12 = [ - { name = "onnxruntime-gpu", version = "1.22.0", source = { registry = "https://pypi.org/simple" }, marker = "platform_system != 'darwin' or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "pycuda", marker = "platform_system != 'darwin' or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "onnxruntime-gpu", version = "1.22.0", source = { registry = "https://pypi.org/simple" }, marker = "platform_system != 'darwin' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pycuda", marker = "platform_system != 'darwin' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra 
== 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] onnx-jp6-cu126 = [ - { name = "numpy", marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or 
(sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 
'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra 
== 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or 
(sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "onnxruntime-gpu", version = "1.23.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "pycuda", marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or 
(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform 
!= 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] @@ -1003,10 +2970,10 @@ test = [ { name = "requests-mock" }, ] torch-cpu = [ - { name = "torch", version = "2.7.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 
'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] torch-cu118 = [ { name = "pycuda", marker = "platform_system != 'darwin' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126')" }, @@ -1020,30 +2987,29 @@ torch-cu124 = [ ] torch-cu126 = [ { name = "pycuda", marker = "platform_system != 'darwin' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.org/simple" } }, - { name = "torchvision", version = "0.23.0", source = { registry = "https://pypi.org/simple" } }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.org/simple" } }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.org/simple" } }, ] torch-cu128 = [ - { name = "pycuda", marker = "platform_system != 'darwin' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" } }, - { name = "torchvision", version = "0.22.1", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pycuda", marker = "platform_system != 'darwin' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" } }, + { name = "torchvision", version = "0.26.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" } }, ] torch-cu130 = [ - { name = "pycuda", marker = "platform_system != 'darwin' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.org/simple" } }, - { name = "torchvision", version = "0.23.0", source = { registry = "https://pypi.org/simple" } }, + { name = "pycuda", marker = "platform_system != 'darwin' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.org/simple" } }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.org/simple" } }, ] torch-jp6-cu126 = [ - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" } }, { name = "pycuda" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" } }, - { name 
= "torchvision", version = "0.23.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" } }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" } }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" } }, ] trt10 = [ - { name = "pycuda", marker = "platform_system != 'darwin' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "tensorrt-cu12", marker = "sys_platform == 'linux' or sys_platform == 'win32' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "tensorrt-lean-cu12", marker = "sys_platform == 'linux' or sys_platform == 'win32' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pycuda", marker = "platform_system != 'darwin' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "tensorrt-cu12", marker = "sys_platform == 'linux' or sys_platform == 'win32' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "tensorrt-lean-cu12", marker = "sys_platform == 'linux' or sys_platform == 'win32' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] [package.metadata] @@ -1053,6 +3019,7 @@ requires-dist = [ { name = "bitsandbytes", marker = "sys_platform != 'darwin'", specifier = ">=0.46.1,<0.48.0" }, { name = "easyocr", specifier = "~=1.7.2" }, { name = "einops", specifier = ">=0.7.0,<1.0.0" }, + { name = "falcon-perception", marker = "extra == 'falcon-perception'", specifier = "==1.0.0" }, { name = "filelock", specifier = ">=3.12.0,<4.0.0" }, { name = "mike", marker = "extra == 'docs'", specifier = ">=2.0.0" }, { name = "mkdocs-gen-files", marker = "extra == 'docs'", specifier = "~=0.6.0" }, @@ -1127,15 +3094,15 @@ requires-dist = [ { name = "torchvision", marker = "extra == 'torch-jp6-cu126'", index = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple", conflict = { package = "inference-models", extra = "torch-jp6-cu126" } }, { name = "transformers", specifier = ">=5.2.0,<5.3.0" }, ] -provides-extras = ["torch-cpu", "torch-cu118", "torch-cu124", "torch-cu126", "torch-cu128", "torch-cu130", "torch-jp6-cu126", "onnx-cpu", "onnx-cu118", "onnx-cu12", "onnx-jp6-cu126", "mediapipe", "trt10", "test", "docs"] +provides-extras = ["torch-cpu", "torch-cu118", "torch-cu124", "torch-cu126", "torch-cu128", "torch-cu130", "torch-jp6-cu126", "onnx-cpu", "onnx-cu118", "onnx-cu12", "onnx-jp6-cu126", "falcon-perception", "mediapipe", "trt10", "test", "docs"] [[package]] name = "iniconfig" -version = "2.1.0" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, ] [[package]] @@ -1151,14 +3118,15 @@ sdist = { url = "https://files.pythonhosted.org/packages/72/73/b3d451dfc523756cf [[package]] name = "ipykernel" -version = "6.31.0" +version = "7.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "appnope", marker = "sys_platform == 'darwin' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "appnope", marker = "sys_platform == 'darwin' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126')" }, { name = "comm" }, { name = "debugpy" }, - { name = "ipython", version = "8.38.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "ipython", version = "9.9.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "ipython", version = "8.39.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "ipython", version = "9.10.1", source = { registry = 
"https://pypi.org/simple" }, marker = "python_full_version == '3.11.*' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "ipython", version = "9.12.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "jupyter-client" }, { name = "jupyter-core" }, { name = "matplotlib-inline" }, @@ -1169,66 +3137,701 @@ dependencies = [ { name = "tornado" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a5/1d/d5ba6edbfe6fae4c3105bca3a9c889563cc752c7f2de45e333164c7f4846/ipykernel-6.31.0.tar.gz", hash = "sha256:2372ce8bc1ff4f34e58cafed3a0feb2194b91fc7cad0fc72e79e47b45ee9e8f6", size = 167493, upload-time = "2025-10-20T11:42:39.948Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ca/8d/b68b728e2d06b9e0051019640a40a9eb7a88fcd82c2e1b5ce70bef5ff044/ipykernel-7.2.0.tar.gz", hash = "sha256:18ed160b6dee2cbb16e5f3575858bc19d8f1fe6046a9a680c708494ce31d909e", size = 176046, upload-time = "2026-02-06T16:43:27.403Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/d8/502954a4ec0efcf264f99b65b41c3c54e65a647d9f0d6f62cd02227d242c/ipykernel-6.31.0-py3-none-any.whl", hash = "sha256:abe5386f6ced727a70e0eb0cf1da801fa7c5fa6ff82147747d5a0406cd8c94af", size = 117003, upload-time = "2025-10-20T11:42:37.502Z" }, + { url = "https://files.pythonhosted.org/packages/82/b9/e73d5d9f405cba7706c539aa8b311b49d4c2f3d698d9c12f815231169c71/ipykernel-7.2.0-py3-none-any.whl", hash = "sha256:3bbd4420d2b3cc105cbdf3756bfc04500b1e52f090a90716851f3916c62e1661", size = 118788, upload-time = "2026-02-06T16:43:25.149Z" }, ] [[package]] name = "ipython" -version = "8.38.0" +version = "8.39.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version < '3.11' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", ] dependencies = [ - { name = "colorama", marker = "(python_full_version < '3.11' and sys_platform 
== 'win32') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= 
'3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform != 'win32' and extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "decorator", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "exceptiongroup", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "jedi", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "matplotlib-inline", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "pexpect", marker = "(python_full_version < '3.11' and sys_platform != 'emscripten' and sys_platform != 'win32') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' 
and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra 
!= 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') 
or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "prompt-toolkit", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra 
== 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "pygments", marker = "python_full_version < '3.11' 
or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "stack-data", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "traitlets", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "typing-extensions", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "colorama", marker = "(python_full_version < '3.11' and sys_platform == 'win32') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra 
== 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or 
(sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "decorator", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "jedi", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "matplotlib-inline", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pexpect", marker = "(python_full_version < '3.11' and sys_platform != 'emscripten' and sys_platform != 'win32') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'emscripten' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "prompt-toolkit", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pygments", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "stack-data", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "traitlets", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "typing-extensions", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e5/61/1810830e8b93c72dcd3c0f150c80a00c3deb229562d9423807ec92c3a539/ipython-8.38.0.tar.gz", hash = "sha256:9cfea8c903ce0867cc2f23199ed8545eb741f3a69420bfcf3743ad1cec856d39", size = 5513996, upload-time = "2026-01-05T10:59:06.901Z" } +sdist = { url = "https://files.pythonhosted.org/packages/40/18/f8598d287006885e7136451fdea0755af4ebcbfe342836f24deefaed1164/ipython-8.39.0.tar.gz", hash = "sha256:4110ae96012c379b8b6db898a07e186c40a2a1ef5d57a7fa83166047d9da7624", size = 5513971, upload-time = "2026-03-27T10:02:13.94Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/df/db59624f4c71b39717c423409950ac3f2c8b2ce4b0aac843112c7fb3f721/ipython-8.38.0-py3-none-any.whl", hash = "sha256:750162629d800ac65bb3b543a14e7a74b0e88063eac9b92124d4b2aa3f6d8e86", size = 831813, upload-time = "2026-01-05T10:59:04.239Z" }, + { url = "https://files.pythonhosted.org/packages/c0/56/4cc7fc9e9e3f38fd324f24f8afe0ad8bb5fa41283f37f1aaf9de0612c968/ipython-8.39.0-py3-none-any.whl", hash = "sha256:bb3c51c4fa8148ab1dea07a79584d1c854e234ea44aa1283bcb37bc75054651f", size = 831849, upload-time = "2026-03-27T10:02:07.846Z" }, ] [[package]] name = "ipython" -version = "9.9.0" +version = "9.10.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", - "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or 
(python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' 
and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == 
'3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", 
+ "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' 
and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform 
!= 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' 
and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and 
extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra 
!= 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == 
'3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 
'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and 
sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", 
+ "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", 
+ "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == 
'3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", +] +dependencies = [ + { name = "colorama", marker = "(python_full_version == '3.11.*' and sys_platform == 'win32') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu118') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "decorator", marker = "python_full_version == '3.11.*' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "ipython-pygments-lexers", marker = "python_full_version == '3.11.*' or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "jedi", marker = "python_full_version == '3.11.*' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "matplotlib-inline", marker = "python_full_version == '3.11.*' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') 
or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pexpect", marker = "(python_full_version == '3.11.*' and sys_platform != 'emscripten' and sys_platform != 'win32') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version != '3.11.*' and extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version != '3.11.*' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version != '3.11.*' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 
'emscripten' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "prompt-toolkit", marker = "python_full_version == '3.11.*' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pygments", marker = "python_full_version == '3.11.*' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "stack-data", marker = "python_full_version == '3.11.*' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "traitlets", marker = "python_full_version == '3.11.*' or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "typing-extensions", marker = "python_full_version == '3.11.*' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c5/25/daae0e764047b0a2480c7bbb25d48f4f509b5818636562eeac145d06dfee/ipython-9.10.1.tar.gz", hash = "sha256:e170e9b2a44312484415bdb750492699bf329233b03f2557a9692cce6466ada4", size = 4426663, upload-time = "2026-03-27T09:53:26.244Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/09/ba70f8d662d5671687da55ad2cc0064cf795b15e1eea70907532202e7c97/ipython-9.10.1-py3-none-any.whl", hash = "sha256:82d18ae9fb9164ded080c71ef92a182ee35ee7db2395f67616034bebb020a232", size = 622827, upload-time = "2026-03-27T09:53:24.566Z" }, +] + +[[package]] +name = "ipython" +version = "9.12.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= 
'3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' 
and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and 
sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 
's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 
's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' 
and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine 
!= 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= 
'3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine 
!= 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' 
and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", ] dependencies = [ - { name = "colorama", marker = "(python_full_version >= 
'3.11' and sys_platform == 'win32') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or 
(python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform != 'win32' and 
extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "decorator", marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "ipython-pygments-lexers", marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "jedi", marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "matplotlib-inline", marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "pexpect", marker = "(python_full_version >= '3.11' and sys_platform != 'emscripten' and sys_platform != 'win32') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra 
== 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra 
!= 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') 
or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "prompt-toolkit", marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "pygments", marker = "python_full_version 
>= '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "stack-data", marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "traitlets", marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "typing-extensions", marker = "python_full_version == '3.11.*' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra 
== 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "colorama", marker = "(python_full_version >= '3.12' and sys_platform == 'win32') or (python_full_version < '3.12' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.12' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or 
(sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "decorator", marker = "python_full_version >= '3.12' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "ipython-pygments-lexers", marker = "python_full_version >= '3.12' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "jedi", marker = "python_full_version >= '3.12' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "matplotlib-inline", marker = "python_full_version >= '3.12' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pexpect", marker = "(python_full_version >= '3.12' and sys_platform != 'emscripten' and sys_platform != 'win32') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'emscripten' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "prompt-toolkit", marker = "python_full_version >= '3.12' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pygments", marker = "python_full_version >= '3.12' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "stack-data", marker = "python_full_version >= '3.12' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "traitlets", marker = "python_full_version >= '3.12' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/46/dd/fb08d22ec0c27e73c8bc8f71810709870d51cadaf27b7ddd3f011236c100/ipython-9.9.0.tar.gz", hash = "sha256:48fbed1b2de5e2c7177eefa144aba7fcb82dac514f09b57e2ac9da34ddb54220", size = 4425043, upload-time = "2026-01-05T12:36:46.233Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/73/7114f80a8f9cabdb13c27732dce24af945b2923dcab80723602f7c8bc2d8/ipython-9.12.0.tar.gz", hash = "sha256:01daa83f504b693ba523b5a407246cabde4eb4513285a3c6acaff11a66735ee4", size = 4428879, upload-time = "2026-03-27T09:42:45.312Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/92/162cfaee4ccf370465c5af1ce36a9eacec1becb552f2033bb3584e6f640a/ipython-9.9.0-py3-none-any.whl", hash = "sha256:b457fe9165df2b84e8ec909a97abcf2ed88f565970efba16b1f7229c283d252b", size = 621431, upload-time = "2026-01-05T12:36:44.669Z" }, + { url = "https://files.pythonhosted.org/packages/59/22/906c8108974c673ebef6356c506cebb6870d48cedea3c41e949e2dd556bb/ipython-9.12.0-py3-none-any.whl", hash = "sha256:0f2701e8ee86e117e37f50563205d36feaa259d2e08d4a6bc6b6d74b18ce128d", size = 625661, upload-time = "2026-03-27T09:42:42.831Z" }, ] [[package]] @@ -1236,7 +3839,7 @@ name = "ipython-pygments-lexers" version = "1.1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pygments", marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pygments", marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393, upload-time = "2025-01-17T11:24:34.505Z" } wheels = [ @@ -1247,26 +3850,677 @@ wheels = [ name = "jax" version = "0.6.2" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", +] dependencies = [ - { name = "jaxlib" }, - { name = "ml-dtypes" }, - { name = "numpy" }, - { name = "opt-einsum" }, - { name = "scipy" }, + { name = "jaxlib", version = "0.6.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "ml-dtypes", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra 
== 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "opt-einsum", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/cf/1e/267f59c8fb7f143c3f778c76cb7ef1389db3fd7e4540f04b9f42ca90764d/jax-0.6.2.tar.gz", hash = "sha256:a437d29038cbc8300334119692744704ca7941490867b9665406b7f90665cd96", size = 2334091, upload-time = "2025-06-17T23:10:27.186Z" } wheels = [ { url = 
"https://files.pythonhosted.org/packages/31/a8/97ef0cbb7a17143ace2643d600a7b80d6705b2266fc31078229e406bdef2/jax-0.6.2-py3-none-any.whl", hash = "sha256:bb24a82dc60ccf704dcaf6dbd07d04957f68a6c686db19630dd75260d1fb788c", size = 2722396, upload-time = "2025-06-17T23:10:25.293Z" }, ] +[[package]] +name = "jax" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
== 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 
's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == 
'3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == 
'3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' 
and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine 
!= 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' 
and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra 
!= 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and 
extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 
'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra 
!= 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and 
sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' 
and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and 
sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' 
and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and 
extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == 
'3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine 
!= 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra 
!= 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
>= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == 
'3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra 
!= 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra 
!= 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= 
'3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", +] +dependencies = [ + { name = "jaxlib", version = "0.7.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "ml-dtypes", marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or 
(extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version 
< '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "opt-einsum", marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bc/e8/b393ee314d3b042bd66b986d38e52f4e6046590399d916381265c20467d3/jax-0.7.1.tar.gz", hash = "sha256:118f56338c503361d2791f069d24339d8d44a8db442ed851d2e591222fb7a56d", size = 2428411, upload-time = "2025-08-20T15:55:46.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/81/793d78c91b0546b3b1f08e55fdd97437174171cd7d70e46098f1a4d94b7b/jax-0.7.1-py3-none-any.whl", hash = "sha256:056e576e0e58465506125699f48111ac8891cce4c9ebf034704c42b219dfd4a6", size = 2827341, upload-time = "2025-08-20T15:55:44.576Z" }, +] + [[package]] name = "jaxlib" version = "0.6.2" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version < 
'3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", +] dependencies = [ - { name = "ml-dtypes" }, - { name = "numpy" }, - { name = "scipy" }, + { name = "ml-dtypes", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = 
"numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and 
extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/15/c5/41598634c99cbebba46e6777286fb76abc449d33d50aeae5d36128ca8803/jaxlib-0.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:da4601b2b5dc8c23d6afb293eacfb9aec4e1d1871cb2f29c5a151d103e73b0f8", size = 54298019, upload-time = "2025-06-17T23:10:36.916Z" }, @@ -1283,6 +4537,645 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/de/3a/06849113c844b86d20174df54735c84202ccf82cbd36d805f478c834418b/jaxlib-0.6.2-cp312-cp312-win_amd64.whl", hash = "sha256:921dbd4db214eba19a29ba9f2450d880e08b2b2c7b968f28cc89da3e62366af4", size = 57919603, upload-time = "2025-06-17T23:11:23.207Z" }, ] +[[package]] +name = "jaxlib" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform 
== 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 
'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
>= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", 
+ "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == 
'3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' 
and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine 
!= 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' 
and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= 
'3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and 
extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == 
'3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine 
!= 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 
'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
>= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
== 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' 
and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 
'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine 
!= 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and 
extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine 
!= 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 
'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", +] +dependencies = [ + { name = "ml-dtypes", marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and 
extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra 
== 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra 
== 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/af/5058d545e95f99a54289648f5430cc3c23263dd70a1391e7491f24ed328d/jaxlib-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f32c3e4c167b7327c342e82d3df84079714ea0b43718be871d039999670b3c9", size = 57686934, upload-time = "2025-08-20T15:55:58.989Z" }, + { url = "https://files.pythonhosted.org/packages/e8/77/ef7f6cd03e699da7d9755f88741c29b3015654473fc9d5f906da19edcb47/jaxlib-0.7.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9fb189c3b39470c4394ffcb18b71e47cffc5bf85e8fcb1e33692686b0c3e04dd", size = 85134885, upload-time = "2025-08-20T15:56:03.484Z" }, + { url = "https://files.pythonhosted.org/packages/4d/72/304018d46703f337787f010735f70d17212f86778fcba8bb5cf678f8e460/jaxlib-0.7.1-cp311-cp311-manylinux_2_27_x86_64.whl", hash = "sha256:eaf5f68f53bf4dcb93b6512538547667625588e4f3ccaeef048788fd18d8c0d5", size = 81147868, upload-time = "2025-08-20T15:56:07.214Z" }, + { url = "https://files.pythonhosted.org/packages/f7/b7/0f0df407518691099d659ba6e19db01320dfb58e49d80594eaddd57d77c1/jaxlib-0.7.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:ab4510fbaeafac6c794ab335f23e71200d824c48f6a0ab20553db8deab8805c5", size = 61185342, upload-time = "2025-08-20T15:56:10.452Z" }, + { url = "https://files.pythonhosted.org/packages/ef/1f/10543d7a3f7e76dd4bbdc77134890ac2f41bc8570c565961464f6320009b/jaxlib-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:127c07c727703e5d59f84f655169bec849f4422e52f8546349cecc30a8a13e1d", size = 57682851, upload-time = "2025-08-20T15:56:13.395Z" }, + { url = "https://files.pythonhosted.org/packages/de/4d/76ee71959311fe3da9951aa6f55af8f98eb3572bb322f5a7c89faf7ab933/jaxlib-0.7.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:f0f1f52956b8c2518ab000a4d3d8c21be777e1d47f926ba03640e391061a41ee", size = 85133707, upload-time = "2025-08-20T15:56:16.908Z" }, + { url = "https://files.pythonhosted.org/packages/0d/50/e37d02e250f5feb755112ec95b1c012a36d48a99209277267037d100f630/jaxlib-0.7.1-cp312-cp312-manylinux_2_27_x86_64.whl", hash = "sha256:74abd3135797f82440dd3711a35cba16c430d1bba65474b85bb70e41733a52e9", size = 81156916, upload-time = "2025-08-20T15:56:20.41Z" }, + { url = "https://files.pythonhosted.org/packages/5a/97/c6c28dfe57cccffd85512615416024b52dd327d78270204caba9311e71f1/jaxlib-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:c4023863b14f280516f24ecb7539b4300a3236ea81ed69ad82595beceed1ba1f", size = 61212445, upload-time = "2025-08-20T15:56:23.929Z" }, +] + [[package]] name = "jedi" version = "0.19.2" @@ -1374,7 +5267,7 @@ wheels = [ [[package]] name = "jupytext" -version = "1.18.1" +version = "1.19.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, @@ -1382,70 +5275,76 @@ dependencies = [ { name = "nbformat" }, { name = "packaging" }, { name = "pyyaml" }, - { name = "tomli", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "tomli", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9b/5d/82a614a49493fa84b2019a3e03020a8b9927208ae177b81f7e0b30330c82/jupytext-1.18.1.tar.gz", hash = "sha256:5c0962ca8d222db45cbe1848b4805dbbe3ddb957603fc96651b6cd7fd403fafb", size = 4270997, upload-time = "2025-10-19T15:06:30.992Z" } +sdist = { url = "https://files.pythonhosted.org/packages/13/a5/80c02f307c8ce863cb33e27daf049315e9d96979e14eead700923b5ec9cc/jupytext-1.19.1.tar.gz", hash = "sha256:82587c07e299173c70ed5e8ec7e75183edf1be289ed518bab49ad0d4e3d5f433", size = 4307829, upload-time = "2026-01-25T21:35:13.276Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/0d/2d240e7098e0cafba4d25e9530e7596b1bb1bd4476e41b10346bcaaa36d6/jupytext-1.18.1-py3-none-any.whl", hash = "sha256:24f999400726a1c658beae55e15fdd2a6255ab1a418697864cd779874e6011ab", size = 167143, upload-time = "2025-10-19T15:06:28.975Z" }, + { url = "https://files.pythonhosted.org/packages/16/5a/736dd2f4535dbf3bf26523f9158c011389ef88dd06ec2eef67fd744f1c7b/jupytext-1.19.1-py3-none-any.whl", hash = "sha256:d8975035155d034bdfde5c0c37891425314b7ea8d3a6c4b5d18c294348714cd9", size = 170478, upload-time = "2026-01-25T21:35:11.17Z" }, ] [[package]] name = "kiwisolver" -version = "1.4.8" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/82/59/7c91426a8ac292e1cdd53a63b6d9439abd573c875c3f92c146767dd33faf/kiwisolver-1.4.8.tar.gz", hash = "sha256:23d5f023bdc8c7e54eb65f03ca5d5bb25b601eac4d7f1a042888a1f45237987e", size = 97538, upload-time = "2024-12-24T18:30:51.519Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/47/5f/4d8e9e852d98ecd26cdf8eaf7ed8bc33174033bba5e07001b289f07308fd/kiwisolver-1.4.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88c6f252f6816a73b1f8c904f7bbe02fd67c09a69f7cb8a0eecdbf5ce78e63db", size = 124623, upload-time = "2024-12-24T18:28:17.687Z" }, - { url = "https://files.pythonhosted.org/packages/1d/70/7f5af2a18a76fe92ea14675f8bd88ce53ee79e37900fa5f1a1d8e0b42998/kiwisolver-1.4.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72941acb7b67138f35b879bbe85be0f6c6a70cab78fe3ef6db9c024d9223e5b", size = 66720, upload-time = "2024-12-24T18:28:19.158Z" }, - { url = "https://files.pythonhosted.org/packages/c6/13/e15f804a142353aefd089fadc8f1d985561a15358c97aca27b0979cb0785/kiwisolver-1.4.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce2cf1e5688edcb727fdf7cd1bbd0b6416758996826a8be1d958f91880d0809d", size = 65413, upload-time = "2024-12-24T18:28:20.064Z" }, - { url = "https://files.pythonhosted.org/packages/ce/6d/67d36c4d2054e83fb875c6b59d0809d5c530de8148846b1370475eeeece9/kiwisolver-1.4.8-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c8bf637892dc6e6aad2bc6d4d69d08764166e5e3f69d469e55427b6ac001b19d", size = 1650826, upload-time = "2024-12-24T18:28:21.203Z" }, - { url = "https://files.pythonhosted.org/packages/de/c6/7b9bb8044e150d4d1558423a1568e4f227193662a02231064e3824f37e0a/kiwisolver-1.4.8-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:034d2c891f76bd3edbdb3ea11140d8510dca675443da7304205a2eaa45d8334c", size = 1628231, upload-time = "2024-12-24T18:28:23.851Z" }, - { url = "https://files.pythonhosted.org/packages/b6/38/ad10d437563063eaaedbe2c3540a71101fc7fb07a7e71f855e93ea4de605/kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47b28d1dfe0793d5e96bce90835e17edf9a499b53969b03c6c47ea5985844c3", size = 1408938, upload-time = "2024-12-24T18:28:26.687Z" }, - { url = 
"https://files.pythonhosted.org/packages/52/ce/c0106b3bd7f9e665c5f5bc1e07cc95b5dabd4e08e3dad42dbe2faad467e7/kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb158fe28ca0c29f2260cca8c43005329ad58452c36f0edf298204de32a9a3ed", size = 1422799, upload-time = "2024-12-24T18:28:30.538Z" }, - { url = "https://files.pythonhosted.org/packages/d0/87/efb704b1d75dc9758087ba374c0f23d3254505edaedd09cf9d247f7878b9/kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5536185fce131780ebd809f8e623bf4030ce1b161353166c49a3c74c287897f", size = 1354362, upload-time = "2024-12-24T18:28:32.943Z" }, - { url = "https://files.pythonhosted.org/packages/eb/b3/fd760dc214ec9a8f208b99e42e8f0130ff4b384eca8b29dd0efc62052176/kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:369b75d40abedc1da2c1f4de13f3482cb99e3237b38726710f4a793432b1c5ff", size = 2222695, upload-time = "2024-12-24T18:28:35.641Z" }, - { url = "https://files.pythonhosted.org/packages/a2/09/a27fb36cca3fc01700687cc45dae7a6a5f8eeb5f657b9f710f788748e10d/kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:641f2ddf9358c80faa22e22eb4c9f54bd3f0e442e038728f500e3b978d00aa7d", size = 2370802, upload-time = "2024-12-24T18:28:38.357Z" }, - { url = "https://files.pythonhosted.org/packages/3d/c3/ba0a0346db35fe4dc1f2f2cf8b99362fbb922d7562e5f911f7ce7a7b60fa/kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d561d2d8883e0819445cfe58d7ddd673e4015c3c57261d7bdcd3710d0d14005c", size = 2334646, upload-time = "2024-12-24T18:28:40.941Z" }, - { url = "https://files.pythonhosted.org/packages/41/52/942cf69e562f5ed253ac67d5c92a693745f0bed3c81f49fc0cbebe4d6b00/kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1732e065704b47c9afca7ffa272f845300a4eb959276bf6970dc07265e73b605", size = 2467260, upload-time = "2024-12-24T18:28:42.273Z" }, - { url = 
"https://files.pythonhosted.org/packages/32/26/2d9668f30d8a494b0411d4d7d4ea1345ba12deb6a75274d58dd6ea01e951/kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bcb1ebc3547619c3b58a39e2448af089ea2ef44b37988caf432447374941574e", size = 2288633, upload-time = "2024-12-24T18:28:44.87Z" }, - { url = "https://files.pythonhosted.org/packages/98/99/0dd05071654aa44fe5d5e350729961e7bb535372935a45ac89a8924316e6/kiwisolver-1.4.8-cp310-cp310-win_amd64.whl", hash = "sha256:89c107041f7b27844179ea9c85d6da275aa55ecf28413e87624d033cf1f6b751", size = 71885, upload-time = "2024-12-24T18:28:47.346Z" }, - { url = "https://files.pythonhosted.org/packages/6c/fc/822e532262a97442989335394d441cd1d0448c2e46d26d3e04efca84df22/kiwisolver-1.4.8-cp310-cp310-win_arm64.whl", hash = "sha256:b5773efa2be9eb9fcf5415ea3ab70fc785d598729fd6057bea38d539ead28271", size = 65175, upload-time = "2024-12-24T18:28:49.651Z" }, - { url = "https://files.pythonhosted.org/packages/da/ed/c913ee28936c371418cb167b128066ffb20bbf37771eecc2c97edf8a6e4c/kiwisolver-1.4.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a4d3601908c560bdf880f07d94f31d734afd1bb71e96585cace0e38ef44c6d84", size = 124635, upload-time = "2024-12-24T18:28:51.826Z" }, - { url = "https://files.pythonhosted.org/packages/4c/45/4a7f896f7467aaf5f56ef093d1f329346f3b594e77c6a3c327b2d415f521/kiwisolver-1.4.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:856b269c4d28a5c0d5e6c1955ec36ebfd1651ac00e1ce0afa3e28da95293b561", size = 66717, upload-time = "2024-12-24T18:28:54.256Z" }, - { url = "https://files.pythonhosted.org/packages/5f/b4/c12b3ac0852a3a68f94598d4c8d569f55361beef6159dce4e7b624160da2/kiwisolver-1.4.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c2b9a96e0f326205af81a15718a9073328df1173a2619a68553decb7097fd5d7", size = 65413, upload-time = "2024-12-24T18:28:55.184Z" }, - { url = 
"https://files.pythonhosted.org/packages/a9/98/1df4089b1ed23d83d410adfdc5947245c753bddfbe06541c4aae330e9e70/kiwisolver-1.4.8-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5020c83e8553f770cb3b5fc13faac40f17e0b205bd237aebd21d53d733adb03", size = 1343994, upload-time = "2024-12-24T18:28:57.493Z" }, - { url = "https://files.pythonhosted.org/packages/8d/bf/b4b169b050c8421a7c53ea1ea74e4ef9c335ee9013216c558a047f162d20/kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dace81d28c787956bfbfbbfd72fdcef014f37d9b48830829e488fdb32b49d954", size = 1434804, upload-time = "2024-12-24T18:29:00.077Z" }, - { url = "https://files.pythonhosted.org/packages/66/5a/e13bd341fbcf73325ea60fdc8af752addf75c5079867af2e04cc41f34434/kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11e1022b524bd48ae56c9b4f9296bce77e15a2e42a502cceba602f804b32bb79", size = 1450690, upload-time = "2024-12-24T18:29:01.401Z" }, - { url = "https://files.pythonhosted.org/packages/9b/4f/5955dcb376ba4a830384cc6fab7d7547bd6759fe75a09564910e9e3bb8ea/kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b9b4d2892fefc886f30301cdd80debd8bb01ecdf165a449eb6e78f79f0fabd6", size = 1376839, upload-time = "2024-12-24T18:29:02.685Z" }, - { url = "https://files.pythonhosted.org/packages/3a/97/5edbed69a9d0caa2e4aa616ae7df8127e10f6586940aa683a496c2c280b9/kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a96c0e790ee875d65e340ab383700e2b4891677b7fcd30a699146f9384a2bb0", size = 1435109, upload-time = "2024-12-24T18:29:04.113Z" }, - { url = "https://files.pythonhosted.org/packages/13/fc/e756382cb64e556af6c1809a1bbb22c141bbc2445049f2da06b420fe52bf/kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23454ff084b07ac54ca8be535f4174170c1094a4cff78fbae4f73a4bcc0d4dab", size = 2245269, 
upload-time = "2024-12-24T18:29:05.488Z" }, - { url = "https://files.pythonhosted.org/packages/76/15/e59e45829d7f41c776d138245cabae6515cb4eb44b418f6d4109c478b481/kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:87b287251ad6488e95b4f0b4a79a6d04d3ea35fde6340eb38fbd1ca9cd35bbbc", size = 2393468, upload-time = "2024-12-24T18:29:06.79Z" }, - { url = "https://files.pythonhosted.org/packages/e9/39/483558c2a913ab8384d6e4b66a932406f87c95a6080112433da5ed668559/kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b21dbe165081142b1232a240fc6383fd32cdd877ca6cc89eab93e5f5883e1c25", size = 2355394, upload-time = "2024-12-24T18:29:08.24Z" }, - { url = "https://files.pythonhosted.org/packages/01/aa/efad1fbca6570a161d29224f14b082960c7e08268a133fe5dc0f6906820e/kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:768cade2c2df13db52475bd28d3a3fac8c9eff04b0e9e2fda0f3760f20b3f7fc", size = 2490901, upload-time = "2024-12-24T18:29:09.653Z" }, - { url = "https://files.pythonhosted.org/packages/c9/4f/15988966ba46bcd5ab9d0c8296914436720dd67fca689ae1a75b4ec1c72f/kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d47cfb2650f0e103d4bf68b0b5804c68da97272c84bb12850d877a95c056bd67", size = 2312306, upload-time = "2024-12-24T18:29:12.644Z" }, - { url = "https://files.pythonhosted.org/packages/2d/27/bdf1c769c83f74d98cbc34483a972f221440703054894a37d174fba8aa68/kiwisolver-1.4.8-cp311-cp311-win_amd64.whl", hash = "sha256:ed33ca2002a779a2e20eeb06aea7721b6e47f2d4b8a8ece979d8ba9e2a167e34", size = 71966, upload-time = "2024-12-24T18:29:14.089Z" }, - { url = "https://files.pythonhosted.org/packages/4a/c9/9642ea855604aeb2968a8e145fc662edf61db7632ad2e4fb92424be6b6c0/kiwisolver-1.4.8-cp311-cp311-win_arm64.whl", hash = "sha256:16523b40aab60426ffdebe33ac374457cf62863e330a90a0383639ce14bf44b2", size = 65311, upload-time = "2024-12-24T18:29:15.892Z" }, - { url = 
"https://files.pythonhosted.org/packages/fc/aa/cea685c4ab647f349c3bc92d2daf7ae34c8e8cf405a6dcd3a497f58a2ac3/kiwisolver-1.4.8-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d6af5e8815fd02997cb6ad9bbed0ee1e60014438ee1a5c2444c96f87b8843502", size = 124152, upload-time = "2024-12-24T18:29:16.85Z" }, - { url = "https://files.pythonhosted.org/packages/c5/0b/8db6d2e2452d60d5ebc4ce4b204feeb16176a851fd42462f66ade6808084/kiwisolver-1.4.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bade438f86e21d91e0cf5dd7c0ed00cda0f77c8c1616bd83f9fc157fa6760d31", size = 66555, upload-time = "2024-12-24T18:29:19.146Z" }, - { url = "https://files.pythonhosted.org/packages/60/26/d6a0db6785dd35d3ba5bf2b2df0aedc5af089962c6eb2cbf67a15b81369e/kiwisolver-1.4.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b83dc6769ddbc57613280118fb4ce3cd08899cc3369f7d0e0fab518a7cf37fdb", size = 65067, upload-time = "2024-12-24T18:29:20.096Z" }, - { url = "https://files.pythonhosted.org/packages/c9/ed/1d97f7e3561e09757a196231edccc1bcf59d55ddccefa2afc9c615abd8e0/kiwisolver-1.4.8-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111793b232842991be367ed828076b03d96202c19221b5ebab421ce8bcad016f", size = 1378443, upload-time = "2024-12-24T18:29:22.843Z" }, - { url = "https://files.pythonhosted.org/packages/29/61/39d30b99954e6b46f760e6289c12fede2ab96a254c443639052d1b573fbc/kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:257af1622860e51b1a9d0ce387bf5c2c4f36a90594cb9514f55b074bcc787cfc", size = 1472728, upload-time = "2024-12-24T18:29:24.463Z" }, - { url = "https://files.pythonhosted.org/packages/0c/3e/804163b932f7603ef256e4a715e5843a9600802bb23a68b4e08c8c0ff61d/kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b5637c3f316cab1ec1c9a12b8c5f4750a4c4b71af9157645bf32830e39c03a", size = 1478388, upload-time = "2024-12-24T18:29:25.776Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/9e/60eaa75169a154700be74f875a4d9961b11ba048bef315fbe89cb6999056/kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:782bb86f245ec18009890e7cb8d13a5ef54dcf2ebe18ed65f795e635a96a1c6a", size = 1413849, upload-time = "2024-12-24T18:29:27.202Z" }, - { url = "https://files.pythonhosted.org/packages/bc/b3/9458adb9472e61a998c8c4d95cfdfec91c73c53a375b30b1428310f923e4/kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc978a80a0db3a66d25767b03688f1147a69e6237175c0f4ffffaaedf744055a", size = 1475533, upload-time = "2024-12-24T18:29:28.638Z" }, - { url = "https://files.pythonhosted.org/packages/e4/7a/0a42d9571e35798de80aef4bb43a9b672aa7f8e58643d7bd1950398ffb0a/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:36dbbfd34838500a31f52c9786990d00150860e46cd5041386f217101350f0d3", size = 2268898, upload-time = "2024-12-24T18:29:30.368Z" }, - { url = "https://files.pythonhosted.org/packages/d9/07/1255dc8d80271400126ed8db35a1795b1a2c098ac3a72645075d06fe5c5d/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:eaa973f1e05131de5ff3569bbba7f5fd07ea0595d3870ed4a526d486fe57fa1b", size = 2425605, upload-time = "2024-12-24T18:29:33.151Z" }, - { url = "https://files.pythonhosted.org/packages/84/df/5a3b4cf13780ef6f6942df67b138b03b7e79e9f1f08f57c49957d5867f6e/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a66f60f8d0c87ab7f59b6fb80e642ebb29fec354a4dfad687ca4092ae69d04f4", size = 2375801, upload-time = "2024-12-24T18:29:34.584Z" }, - { url = "https://files.pythonhosted.org/packages/8f/10/2348d068e8b0f635c8c86892788dac7a6b5c0cb12356620ab575775aad89/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858416b7fb777a53f0c59ca08190ce24e9abbd3cffa18886a5781b8e3e26f65d", size = 2520077, upload-time = "2024-12-24T18:29:36.138Z" }, - { url = 
"https://files.pythonhosted.org/packages/32/d8/014b89fee5d4dce157d814303b0fce4d31385a2af4c41fed194b173b81ac/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:085940635c62697391baafaaeabdf3dd7a6c3643577dde337f4d66eba021b2b8", size = 2338410, upload-time = "2024-12-24T18:29:39.991Z" }, - { url = "https://files.pythonhosted.org/packages/bd/72/dfff0cc97f2a0776e1c9eb5bef1ddfd45f46246c6533b0191887a427bca5/kiwisolver-1.4.8-cp312-cp312-win_amd64.whl", hash = "sha256:01c3d31902c7db5fb6182832713d3b4122ad9317c2c5877d0539227d96bb2e50", size = 71853, upload-time = "2024-12-24T18:29:42.006Z" }, - { url = "https://files.pythonhosted.org/packages/dc/85/220d13d914485c0948a00f0b9eb419efaf6da81b7d72e88ce2391f7aed8d/kiwisolver-1.4.8-cp312-cp312-win_arm64.whl", hash = "sha256:a3c44cb68861de93f0c4a8175fbaa691f0aa22550c331fefef02b618a9dcb476", size = 65424, upload-time = "2024-12-24T18:29:44.38Z" }, - { url = "https://files.pythonhosted.org/packages/1f/f9/ae81c47a43e33b93b0a9819cac6723257f5da2a5a60daf46aa5c7226ea85/kiwisolver-1.4.8-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e7a019419b7b510f0f7c9dceff8c5eae2392037eae483a7f9162625233802b0a", size = 60403, upload-time = "2024-12-24T18:30:41.372Z" }, - { url = "https://files.pythonhosted.org/packages/58/ca/f92b5cb6f4ce0c1ebfcfe3e2e42b96917e16f7090e45b21102941924f18f/kiwisolver-1.4.8-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:286b18e86682fd2217a48fc6be6b0f20c1d0ed10958d8dc53453ad58d7be0bf8", size = 58657, upload-time = "2024-12-24T18:30:42.392Z" }, - { url = "https://files.pythonhosted.org/packages/80/28/ae0240f732f0484d3a4dc885d055653c47144bdf59b670aae0ec3c65a7c8/kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4191ee8dfd0be1c3666ccbac178c5a05d5f8d689bbe3fc92f3c4abec817f8fe0", size = 84948, upload-time = "2024-12-24T18:30:44.703Z" }, - { url = 
"https://files.pythonhosted.org/packages/5d/eb/78d50346c51db22c7203c1611f9b513075f35c4e0e4877c5dde378d66043/kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cd2785b9391f2873ad46088ed7599a6a71e762e1ea33e87514b1a441ed1da1c", size = 81186, upload-time = "2024-12-24T18:30:45.654Z" }, - { url = "https://files.pythonhosted.org/packages/43/f8/7259f18c77adca88d5f64f9a522792e178b2691f3748817a8750c2d216ef/kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c07b29089b7ba090b6f1a669f1411f27221c3662b3a1b7010e67b59bb5a6f10b", size = 80279, upload-time = "2024-12-24T18:30:47.951Z" }, - { url = "https://files.pythonhosted.org/packages/3a/1d/50ad811d1c5dae091e4cf046beba925bcae0a610e79ae4c538f996f63ed5/kiwisolver-1.4.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:65ea09a5a3faadd59c2ce96dc7bf0f364986a315949dc6374f04396b0d60e09b", size = 71762, upload-time = "2024-12-24T18:30:48.903Z" }, +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/67/9c61eccb13f0bdca9307614e782fec49ffdde0f7a2314935d489fa93cd9c/kiwisolver-1.5.0.tar.gz", hash = "sha256:d4193f3d9dc3f6f79aaed0e5637f45d98850ebf01f7ca20e69457f3e8946b66a", size = 103482, upload-time = "2026-03-09T13:15:53.382Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/f8/06549565caa026e540b7e7bab5c5a90eb7ca986015f4c48dace243cd24d9/kiwisolver-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:32cc0a5365239a6ea0c6ed461e8838d053b57e397443c0ca894dcc8e388d4374", size = 122802, upload-time = "2026-03-09T13:12:37.515Z" }, + { url = "https://files.pythonhosted.org/packages/84/eb/8476a0818850c563ff343ea7c9c05dcdcbd689a38e01aa31657df01f91fa/kiwisolver-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cc0b66c1eec9021353a4b4483afb12dfd50e3669ffbb9152d6842eb34c7e29fd", size = 66216, upload-time = "2026-03-09T13:12:38.812Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/c4/f9c8a6b4c21aed4198566e45923512986d6cef530e7263b3a5f823546561/kiwisolver-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:86e0287879f75621ae85197b0877ed2f8b7aa57b511c7331dce2eb6f4de7d476", size = 63917, upload-time = "2026-03-09T13:12:40.053Z" }, + { url = "https://files.pythonhosted.org/packages/f1/0e/ba4ae25d03722f64de8b2c13e80d82ab537a06b30fc7065183c6439357e3/kiwisolver-1.5.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:62f59da443c4f4849f73a51a193b1d9d258dcad0c41bc4d1b8fb2bcc04bfeb22", size = 1628776, upload-time = "2026-03-09T13:12:41.976Z" }, + { url = "https://files.pythonhosted.org/packages/8a/e4/3f43a011bc8a0860d1c96f84d32fa87439d3feedf66e672fef03bf5e8bac/kiwisolver-1.5.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9190426b7aa26c5229501fa297b8d0653cfd3f5a36f7990c264e157cbf886b3b", size = 1228164, upload-time = "2026-03-09T13:12:44.002Z" }, + { url = "https://files.pythonhosted.org/packages/4b/34/3a901559a1e0c218404f9a61a93be82d45cb8f44453ba43088644980f033/kiwisolver-1.5.0-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c8277104ded0a51e699c8c3aff63ce2c56d4ed5519a5f73e0fd7057f959a2b9e", size = 1246656, upload-time = "2026-03-09T13:12:45.557Z" }, + { url = "https://files.pythonhosted.org/packages/87/9e/f78c466ea20527822b95ad38f141f2de1dcd7f23fb8716b002b0d91bbe59/kiwisolver-1.5.0-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8f9baf6f0a6e7571c45c8863010b45e837c3ee1c2c77fcd6ef423be91b21fedb", size = 1295562, upload-time = "2026-03-09T13:12:47.562Z" }, + { url = "https://files.pythonhosted.org/packages/0a/66/fd0e4a612e3a286c24e6d6f3a5428d11258ed1909bc530ba3b59807fd980/kiwisolver-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cff8e5383db4989311f99e814feeb90c4723eb4edca425b9d5d9c3fefcdd9537", size = 2178473, upload-time = "2026-03-09T13:12:50.254Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/8e/6cac929e0049539e5ee25c1ee937556f379ba5204840d03008363ced662d/kiwisolver-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ebae99ed6764f2b5771c522477b311be313e8841d2e0376db2b10922daebbba4", size = 2274035, upload-time = "2026-03-09T13:12:51.785Z" }, + { url = "https://files.pythonhosted.org/packages/ca/d3/9d0c18f1b52ea8074b792452cf17f1f5a56bd0302a85191f405cfbf9da16/kiwisolver-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:d5cd5189fc2b6a538b75ae45433140c4823463918f7b1617c31e68b085c0022c", size = 2443217, upload-time = "2026-03-09T13:12:53.329Z" }, + { url = "https://files.pythonhosted.org/packages/45/2a/6e19368803a038b2a90857bf4ee9e3c7b667216d045866bf22d3439fd75e/kiwisolver-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f42c23db5d1521218a3276bb08666dcb662896a0be7347cba864eca45ff64ede", size = 2249196, upload-time = "2026-03-09T13:12:55.057Z" }, + { url = "https://files.pythonhosted.org/packages/75/2b/3f641dfcbe72e222175d626bacf2f72c3b34312afec949dd1c50afa400f5/kiwisolver-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:94eff26096eb5395136634622515b234ecb6c9979824c1f5004c6e3c3c85ccd2", size = 73389, upload-time = "2026-03-09T13:12:56.496Z" }, + { url = "https://files.pythonhosted.org/packages/da/88/299b137b9e0025d8982e03d2d52c123b0a2b159e84b0ef1501ef446339cf/kiwisolver-1.5.0-cp310-cp310-win_arm64.whl", hash = "sha256:dd952e03bfbb096cfe2dd35cd9e00f269969b67536cb4370994afc20ff2d0875", size = 64782, upload-time = "2026-03-09T13:12:57.609Z" }, + { url = "https://files.pythonhosted.org/packages/12/dd/a495a9c104be1c476f0386e714252caf2b7eca883915422a64c50b88c6f5/kiwisolver-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9eed0f7edbb274413b6ee781cca50541c8c0facd3d6fd289779e494340a2b85c", size = 122798, upload-time = "2026-03-09T13:12:58.963Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/60/37b4047a2af0cf5ef6d8b4b26e91829ae6fc6a2d1f74524bcb0e7cd28a32/kiwisolver-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c4923e404d6bcd91b6779c009542e5647fef32e4a5d75e115e3bbac6f2335eb", size = 66216, upload-time = "2026-03-09T13:13:00.155Z" }, + { url = "https://files.pythonhosted.org/packages/0a/aa/510dc933d87767584abfe03efa445889996c70c2990f6f87c3ebaa0a18c5/kiwisolver-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0df54df7e686afa55e6f21fb86195224a6d9beb71d637e8d7920c95cf0f89aac", size = 63911, upload-time = "2026-03-09T13:13:01.671Z" }, + { url = "https://files.pythonhosted.org/packages/80/46/bddc13df6c2a40741e0cc7865bb1c9ed4796b6760bd04ce5fae3928ef917/kiwisolver-1.5.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2517e24d7315eb51c10664cdb865195df38ab74456c677df67bb47f12d088a27", size = 1438209, upload-time = "2026-03-09T13:13:03.385Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d6/76621246f5165e5372f02f5e6f3f48ea336a8f9e96e43997d45b240ed8cd/kiwisolver-1.5.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ff710414307fefa903e0d9bdf300972f892c23477829f49504e59834f4195398", size = 1248888, upload-time = "2026-03-09T13:13:05.231Z" }, + { url = "https://files.pythonhosted.org/packages/b2/c1/31559ec6fb39a5b48035ce29bb63ade628f321785f38c384dee3e2c08bc1/kiwisolver-1.5.0-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6176c1811d9d5a04fa391c490cc44f451e240697a16977f11c6f722efb9041db", size = 1266304, upload-time = "2026-03-09T13:13:06.743Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ef/1cb8276f2d29cc6a41e0a042f27946ca347d3a4a75acf85d0a16aa6dcc82/kiwisolver-1.5.0-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50847dca5d197fcbd389c805aa1a1cf32f25d2e7273dc47ab181a517666b68cc", size = 1319650, upload-time = "2026-03-09T13:13:08.607Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/e4/5ba3cecd7ce6236ae4a80f67e5d5531287337d0e1f076ca87a5abe4cd5d0/kiwisolver-1.5.0-cp311-cp311-manylinux_2_39_riscv64.whl", hash = "sha256:01808c6d15f4c3e8559595d6d1fe6411c68e4a3822b4b9972b44473b24f4e679", size = 970949, upload-time = "2026-03-09T13:13:10.299Z" }, + { url = "https://files.pythonhosted.org/packages/5a/69/dc61f7ae9a2f071f26004ced87f078235b5507ab6e5acd78f40365655034/kiwisolver-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f1f9f4121ec58628c96baa3de1a55a4e3a333c5102c8e94b64e23bf7b2083309", size = 2199125, upload-time = "2026-03-09T13:13:11.841Z" }, + { url = "https://files.pythonhosted.org/packages/e5/7b/abbe0f1b5afa85f8d084b73e90e5f801c0939eba16ac2e49af7c61a6c28d/kiwisolver-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b7d335370ae48a780c6e6a6bbfa97342f563744c39c35562f3f367665f5c1de2", size = 2293783, upload-time = "2026-03-09T13:13:14.399Z" }, + { url = "https://files.pythonhosted.org/packages/8a/80/5908ae149d96d81580d604c7f8aefd0e98f4fd728cf172f477e9f2a81744/kiwisolver-1.5.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:800ee55980c18545af444d93fdd60c56b580db5cc54867d8cbf8a1dc0829938c", size = 1960726, upload-time = "2026-03-09T13:13:16.047Z" }, + { url = "https://files.pythonhosted.org/packages/84/08/a78cb776f8c085b7143142ce479859cfec086bd09ee638a317040b6ef420/kiwisolver-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c438f6ca858697c9ab67eb28246c92508af972e114cac34e57a6d4ba17a3ac08", size = 2464738, upload-time = "2026-03-09T13:13:17.897Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e1/65584da5356ed6cb12c63791a10b208860ac40a83de165cb6a6751a686e3/kiwisolver-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8c63c91f95173f9c2a67c7c526b2cea976828a0e7fced9cdcead2802dc10f8a4", size = 2270718, upload-time = "2026-03-09T13:13:19.421Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/6c/28f17390b62b8f2f520e2915095b3c94d88681ecf0041e75389d9667f202/kiwisolver-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:beb7f344487cdcb9e1efe4b7a29681b74d34c08f0043a327a74da852a6749e7b", size = 73480, upload-time = "2026-03-09T13:13:20.818Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0e/2ee5debc4f77a625778fec5501ff3e8036fe361b7ee28ae402a485bb9694/kiwisolver-1.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:ad4ae4ffd1ee9cd11357b4c66b612da9888f4f4daf2f36995eda64bd45370cac", size = 64930, upload-time = "2026-03-09T13:13:21.997Z" }, + { url = "https://files.pythonhosted.org/packages/4d/b2/818b74ebea34dabe6d0c51cb1c572e046730e64844da6ed646d5298c40ce/kiwisolver-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:4e9750bc21b886308024f8a54ccb9a2cc38ac9fa813bf4348434e3d54f337ff9", size = 123158, upload-time = "2026-03-09T13:13:23.127Z" }, + { url = "https://files.pythonhosted.org/packages/bf/d9/405320f8077e8e1c5c4bd6adc45e1e6edf6d727b6da7f2e2533cf58bff71/kiwisolver-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:72ec46b7eba5b395e0a7b63025490d3214c11013f4aacb4f5e8d6c3041829588", size = 66388, upload-time = "2026-03-09T13:13:24.765Z" }, + { url = "https://files.pythonhosted.org/packages/99/9f/795fedf35634f746151ca8839d05681ceb6287fbed6cc1c9bf235f7887c2/kiwisolver-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ed3a984b31da7481b103f68776f7128a89ef26ed40f4dc41a2223cda7fb24819", size = 64068, upload-time = "2026-03-09T13:13:25.878Z" }, + { url = "https://files.pythonhosted.org/packages/c4/13/680c54afe3e65767bed7ec1a15571e1a2f1257128733851ade24abcefbcc/kiwisolver-1.5.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb5136fb5352d3f422df33f0c879a1b0c204004324150cc3b5e3c4f310c9049f", size = 1477934, upload-time = "2026-03-09T13:13:27.166Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/2f/cebfcdb60fd6a9b0f6b47a9337198bcbad6fbe15e68189b7011fd914911f/kiwisolver-1.5.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2af221f268f5af85e776a73d62b0845fc8baf8ef0abfae79d29c77d0e776aaf", size = 1278537, upload-time = "2026-03-09T13:13:28.707Z" }, + { url = "https://files.pythonhosted.org/packages/f2/0d/9b782923aada3fafb1d6b84e13121954515c669b18af0c26e7d21f579855/kiwisolver-1.5.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b0f172dc8ffaccb8522d7c5d899de00133f2f1ca7b0a49b7da98e901de87bf2d", size = 1296685, upload-time = "2026-03-09T13:13:30.528Z" }, + { url = "https://files.pythonhosted.org/packages/27/70/83241b6634b04fe44e892688d5208332bde130f38e610c0418f9ede47ded/kiwisolver-1.5.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6ab8ba9152203feec73758dad83af9a0bbe05001eb4639e547207c40cfb52083", size = 1346024, upload-time = "2026-03-09T13:13:32.818Z" }, + { url = "https://files.pythonhosted.org/packages/e4/db/30ed226fb271ae1a6431fc0fe0edffb2efe23cadb01e798caeb9f2ceae8f/kiwisolver-1.5.0-cp312-cp312-manylinux_2_39_riscv64.whl", hash = "sha256:cdee07c4d7f6d72008d3f73b9bf027f4e11550224c7c50d8df1ae4a37c1402a6", size = 987241, upload-time = "2026-03-09T13:13:34.435Z" }, + { url = "https://files.pythonhosted.org/packages/ec/bd/c314595208e4c9587652d50959ead9e461995389664e490f4dce7ff0f782/kiwisolver-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7c60d3c9b06fb23bd9c6139281ccbdc384297579ae037f08ae90c69f6845c0b1", size = 2227742, upload-time = "2026-03-09T13:13:36.4Z" }, + { url = "https://files.pythonhosted.org/packages/c1/43/0499cec932d935229b5543d073c2b87c9c22846aab48881e9d8d6e742a2d/kiwisolver-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e315e5ec90d88e140f57696ff85b484ff68bb311e36f2c414aa4286293e6dee0", size = 2323966, upload-time = "2026-03-09T13:13:38.204Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/6f/79b0d760907965acfd9d61826a3d41f8f093c538f55cd2633d3f0db269f6/kiwisolver-1.5.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:1465387ac63576c3e125e5337a6892b9e99e0627d52317f3ca79e6930d889d15", size = 1977417, upload-time = "2026-03-09T13:13:39.966Z" }, + { url = "https://files.pythonhosted.org/packages/ab/31/01d0537c41cb75a551a438c3c7a80d0c60d60b81f694dac83dd436aec0d0/kiwisolver-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:530a3fd64c87cffa844d4b6b9768774763d9caa299e9b75d8eca6a4423b31314", size = 2491238, upload-time = "2026-03-09T13:13:41.698Z" }, + { url = "https://files.pythonhosted.org/packages/e4/34/8aefdd0be9cfd00a44509251ba864f5caf2991e36772e61c408007e7f417/kiwisolver-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1d9daea4ea6b9be74fe2f01f7fbade8d6ffab263e781274cffca0dba9be9eec9", size = 2294947, upload-time = "2026-03-09T13:13:43.343Z" }, + { url = "https://files.pythonhosted.org/packages/ad/cf/0348374369ca588f8fe9c338fae49fa4e16eeb10ffb3d012f23a54578a9e/kiwisolver-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:f18c2d9782259a6dc132fdc7a63c168cbc74b35284b6d75c673958982a378384", size = 73569, upload-time = "2026-03-09T13:13:45.792Z" }, + { url = "https://files.pythonhosted.org/packages/28/26/192b26196e2316e2bd29deef67e37cdf9870d9af8e085e521afff0fed526/kiwisolver-1.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:f7c7553b13f69c1b29a5bde08ddc6d9d0c8bfb84f9ed01c30db25944aeb852a7", size = 64997, upload-time = "2026-03-09T13:13:46.878Z" }, + { url = "https://files.pythonhosted.org/packages/1c/fa/2910df836372d8761bb6eff7d8bdcb1613b5c2e03f260efe7abe34d388a7/kiwisolver-1.5.0-graalpy312-graalpy250_312_native-macosx_10_13_x86_64.whl", hash = "sha256:5ae8e62c147495b01a0f4765c878e9bfdf843412446a247e28df59936e99e797", size = 130262, upload-time = "2026-03-09T13:15:35.629Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/41/c5f71f9f00aabcc71fee8b7475e3f64747282580c2fe748961ba29b18385/kiwisolver-1.5.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:f6764a4ccab3078db14a632420930f6186058750df066b8ea2a7106df91d3203", size = 138036, upload-time = "2026-03-09T13:15:36.894Z" }, + { url = "https://files.pythonhosted.org/packages/fa/06/7399a607f434119c6e1fdc8ec89a8d51ccccadf3341dee4ead6bd14caaf5/kiwisolver-1.5.0-graalpy312-graalpy250_312_native-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c31c13da98624f957b0fb1b5bae5383b2333c2c3f6793d9825dd5ce79b525cb7", size = 194295, upload-time = "2026-03-09T13:15:38.22Z" }, + { url = "https://files.pythonhosted.org/packages/b5/91/53255615acd2a1eaca307ede3c90eb550bae9c94581f8c00081b6b1c8f44/kiwisolver-1.5.0-graalpy312-graalpy250_312_native-win_amd64.whl", hash = "sha256:1f1489f769582498610e015a8ef2d36f28f505ab3096d0e16b4858a9ec214f57", size = 75987, upload-time = "2026-03-09T13:15:39.65Z" }, + { url = "https://files.pythonhosted.org/packages/17/6f/6fd4f690a40c2582fa34b97d2678f718acf3706b91d270c65ecb455d0a06/kiwisolver-1.5.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:295d9ffe712caa9f8a3081de8d32fc60191b4b51c76f02f951fd8407253528f4", size = 59606, upload-time = "2026-03-09T13:15:40.81Z" }, + { url = "https://files.pythonhosted.org/packages/82/a0/2355d5e3b338f13ce63f361abb181e3b6ea5fffdb73f739b3e80efa76159/kiwisolver-1.5.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:51e8c4084897de9f05898c2c2a39af6318044ae969d46ff7a34ed3f96274adca", size = 57537, upload-time = "2026-03-09T13:15:42.071Z" }, + { url = "https://files.pythonhosted.org/packages/c8/b9/1d50e610ecadebe205b71d6728fd224ce0e0ca6aba7b9cbe1da049203ac5/kiwisolver-1.5.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b83af57bdddef03c01a9138034c6ff03181a3028d9a1003b301eb1a55e161a3f", size = 79888, upload-time = "2026-03-09T13:15:43.317Z" }, + { url = 
"https://files.pythonhosted.org/packages/cd/ee/b85ffcd75afed0357d74f0e6fc02a4507da441165de1ca4760b9f496390d/kiwisolver-1.5.0-pp310-pypy310_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf4679a3d71012a7c2bf360e5cd878fbd5e4fcac0896b56393dec239d81529ed", size = 77584, upload-time = "2026-03-09T13:15:44.605Z" }, + { url = "https://files.pythonhosted.org/packages/6b/dd/644d0dde6010a8583b4cd66dd41c5f83f5325464d15c4f490b3340ab73b4/kiwisolver-1.5.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:41024ed50e44ab1a60d3fe0a9d15a4ccc9f5f2b1d814ff283c8d01134d5b81bc", size = 73390, upload-time = "2026-03-09T13:15:45.832Z" }, + { url = "https://files.pythonhosted.org/packages/e9/eb/5fcbbbf9a0e2c3a35effb88831a483345326bbc3a030a3b5b69aee647f84/kiwisolver-1.5.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ec4c85dc4b687c7f7f15f553ff26a98bfe8c58f5f7f0ac8905f0ba4c7be60232", size = 59532, upload-time = "2026-03-09T13:15:47.047Z" }, + { url = "https://files.pythonhosted.org/packages/c3/9b/e17104555bb4db148fd52327feea1e96be4b88e8e008b029002c281a21ab/kiwisolver-1.5.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:12e91c215a96e39f57989c8912ae761286ac5a9584d04030ceb3368a357f017a", size = 57420, upload-time = "2026-03-09T13:15:48.199Z" }, + { url = "https://files.pythonhosted.org/packages/48/44/2b5b95b7aa39fb2d8d9d956e0f3d5d45aef2ae1d942d4c3ffac2f9cfed1a/kiwisolver-1.5.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:be4a51a55833dc29ab5d7503e7bcb3b3af3402d266018137127450005cdfe737", size = 79892, upload-time = "2026-03-09T13:15:49.694Z" }, + { url = "https://files.pythonhosted.org/packages/52/7d/7157f9bba6b455cfb4632ed411e199fc8b8977642c2b12082e1bd9e6d173/kiwisolver-1.5.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:daae526907e262de627d8f70058a0f64acc9e2641c164c99c8f594b34a799a16", size = 77603, upload-time = "2026-03-09T13:15:50.945Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/dd/8050c947d435c8d4bc94e3252f4d8bb8a76cfb424f043a8680be637a57f1/kiwisolver-1.5.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:59cd8683f575d96df5bb48f6add94afc055012c29e28124fcae2b63661b9efb1", size = 73558, upload-time = "2026-03-09T13:15:52.112Z" }, ] [[package]] @@ -1459,14 +5358,14 @@ sdist = { url = "https://files.pythonhosted.org/packages/0e/72/a3add0e4eec4eb9e2 [[package]] name = "lazy-loader" -version = "0.4" +version = "0.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6f/6b/c875b30a1ba490860c93da4cabf479e03f584eba06fe5963f6f6644653d8/lazy_loader-0.4.tar.gz", hash = "sha256:47c75182589b91a4e1a85a136c074285a5ad4d9f39c63e0d7fb76391c4574cd1", size = 15431, upload-time = "2024-04-05T13:03:12.261Z" } +sdist = { url = "https://files.pythonhosted.org/packages/49/ac/21a1f8aa3777f5658576777ea76bfb124b702c520bbe90edf4ae9915eafa/lazy_loader-0.5.tar.gz", hash = "sha256:717f9179a0dbed357012ddad50a5ad3d5e4d9a0b8712680d4e687f5e6e6ed9b3", size = 15294, upload-time = "2026-03-06T15:45:09.054Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/60/d497a310bde3f01cb805196ac61b7ad6dc5dcf8dce66634dc34364b20b4f/lazy_loader-0.4-py3-none-any.whl", hash = "sha256:342aa8e14d543a154047afb4ba8ef17f5563baad3fc610d7b15b213b0f119efc", size = 12097, upload-time = "2024-04-05T13:03:10.514Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a1/8d812e53a5da1687abb10445275d41a8b13adb781bbf7196ddbcf8d88505/lazy_loader-0.5-py3-none-any.whl", hash = "sha256:ab0ea149e9c554d4ffeeb21105ac60bed7f3b4fd69b1d2360a4add51b170b005", size = 8044, upload-time = "2026-03-06T15:45:07.668Z" }, ] [[package]] @@ -1483,101 +5382,112 @@ wheels = [ [[package]] name = "markdown" -version = "3.10" +version = "3.10.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/7d/ab/7dd27d9d863b3376fcf23a5a13cb5d024aed1db46f963f1b5735ae43b3be/markdown-3.10.tar.gz", hash = "sha256:37062d4f2aa4b2b6b32aefb80faa300f82cc790cb949a35b8caede34f2b68c0e", size = 364931, upload-time = "2025-11-03T19:51:15.007Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2b/f4/69fa6ed85ae003c2378ffa8f6d2e3234662abd02c10d216c0ba96081a238/markdown-3.10.2.tar.gz", hash = "sha256:994d51325d25ad8aa7ce4ebaec003febcce822c3f8c911e3b17c52f7f589f950", size = 368805, upload-time = "2026-02-09T14:57:26.942Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/81/54e3ce63502cd085a0c556652a4e1b919c45a446bd1e5300e10c44c8c521/markdown-3.10-py3-none-any.whl", hash = "sha256:b5b99d6951e2e4948d939255596523444c0e677c669700b1d17aa4a8a464cb7c", size = 107678, upload-time = "2025-11-03T19:51:13.887Z" }, + { url = "https://files.pythonhosted.org/packages/de/1f/77fa3081e4f66ca3576c896ae5d31c3002ac6607f9747d2e3aa49227e464/markdown-3.10.2-py3-none-any.whl", hash = "sha256:e91464b71ae3ee7afd3017d9f358ef0baf158fd9a298db92f1d4761133824c36", size = 108180, upload-time = "2026-02-09T14:57:25.787Z" }, ] [[package]] name = "markdown-it-py" -version = "3.0.0" +version = "4.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mdurl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, ] [[package]] name = "markupsafe" -version = "3.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" }, - { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393, upload-time = "2024-10-18T15:20:52.426Z" }, - { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = "2024-10-18T15:20:53.578Z" }, - { url = 
"https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" }, - { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" }, - { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977, upload-time = "2024-10-18T15:20:57.189Z" }, - { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" }, - { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" }, - { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065, upload-time = "2024-10-18T15:21:00.307Z" }, - { url = 
"https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514, upload-time = "2024-10-18T15:21:01.122Z" }, - { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, - { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, - { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, - { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, - { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, - { url = 
"https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, - { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, - { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, - { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, - { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, - { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, - { url = 
"https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, - { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, - { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, - { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, - { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, - { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, - { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, - { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/4b/3541d44f3937ba468b75da9eebcae497dcf67adb65caa16760b0a6807ebb/markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559", size = 11631, upload-time = "2025-09-27T18:36:05.558Z" }, + { url = "https://files.pythonhosted.org/packages/98/1b/fbd8eed11021cabd9226c37342fa6ca4e8a98d8188a8d9b66740494960e4/markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419", size = 12057, upload-time = "2025-09-27T18:36:07.165Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/01/e560d658dc0bb8ab762670ece35281dec7b6c1b33f5fbc09ebb57a185519/markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695", size = 22050, upload-time = "2025-09-27T18:36:08.005Z" }, + { url = "https://files.pythonhosted.org/packages/af/cd/ce6e848bbf2c32314c9b237839119c5a564a59725b53157c856e90937b7a/markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591", size = 20681, upload-time = "2025-09-27T18:36:08.881Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2a/b5c12c809f1c3045c4d580b035a743d12fcde53cf685dbc44660826308da/markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c", size = 20705, upload-time = "2025-09-27T18:36:10.131Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e3/9427a68c82728d0a88c50f890d0fc072a1484de2f3ac1ad0bfc1a7214fd5/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f", size = 21524, upload-time = "2025-09-27T18:36:11.324Z" }, + { url = "https://files.pythonhosted.org/packages/bc/36/23578f29e9e582a4d0278e009b38081dbe363c5e7165113fad546918a232/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6", size = 20282, upload-time = "2025-09-27T18:36:12.573Z" }, + { url = "https://files.pythonhosted.org/packages/56/21/dca11354e756ebd03e036bd8ad58d6d7168c80ce1fe5e75218e4945cbab7/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1", size = 20745, upload-time = "2025-09-27T18:36:13.504Z" }, + { url 
= "https://files.pythonhosted.org/packages/87/99/faba9369a7ad6e4d10b6a5fbf71fa2a188fe4a593b15f0963b73859a1bbd/markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa", size = 14571, upload-time = "2025-09-27T18:36:14.779Z" }, + { url = "https://files.pythonhosted.org/packages/d6/25/55dc3ab959917602c96985cb1253efaa4ff42f71194bddeb61eb7278b8be/markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8", size = 15056, upload-time = "2025-09-27T18:36:16.125Z" }, + { url = "https://files.pythonhosted.org/packages/d0/9e/0a02226640c255d1da0b8d12e24ac2aa6734da68bff14c05dd53b94a0fc3/markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1", size = 13932, upload-time = "2025-09-27T18:36:17.311Z" }, + { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, ] [[package]] name = "matplotlib" -version = "3.10.3" +version = "3.10.8" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "contourpy" }, + { name = "contourpy", version = "1.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "contourpy", version = "1.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "cycler" }, { name = "fonttools" }, { name = 
"kiwisolver" }, - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra 
== 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra 
== 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and 
extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' 
and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra 
== 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "packaging" }, { name = "pillow" }, { name = "pyparsing" }, { name = "python-dateutil" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/26/91/d49359a21893183ed2a5b6c76bec40e0b1dcbf8ca148f864d134897cfc75/matplotlib-3.10.3.tar.gz", hash = "sha256:2f82d2c5bb7ae93aaaa4cd42aca65d76ce6376f83304fa3a630b569aca274df0", size = 34799811, upload-time = "2025-05-08T19:10:54.39Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/ea/2bba25d289d389c7451f331ecd593944b3705f06ddf593fa7be75037d308/matplotlib-3.10.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:213fadd6348d106ca7db99e113f1bea1e65e383c3ba76e8556ba4a3054b65ae7", size = 8167862, upload-time = "2025-05-08T19:09:39.563Z" }, - { url = "https://files.pythonhosted.org/packages/41/81/cc70b5138c926604e8c9ed810ed4c79e8116ba72e02230852f5c12c87ba2/matplotlib-3.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3bec61cb8221f0ca6313889308326e7bb303d0d302c5cc9e523b2f2e6c73deb", size = 8042149, upload-time = "2025-05-08T19:09:42.413Z" }, - { url = "https://files.pythonhosted.org/packages/4a/9a/0ff45b6bfa42bb16de597e6058edf2361c298ad5ef93b327728145161bbf/matplotlib-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c21ae75651c0231b3ba014b6d5e08fb969c40cdb5a011e33e99ed0c9ea86ecb", size = 8453719, upload-time = "2025-05-08T19:09:44.901Z" }, - { url = "https://files.pythonhosted.org/packages/85/c7/1866e972fed6d71ef136efbc980d4d1854ab7ef1ea8152bbd995ca231c81/matplotlib-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a49e39755580b08e30e3620efc659330eac5d6534ab7eae50fa5e31f53ee4e30", size = 8590801, upload-time = "2025-05-08T19:09:47.404Z" }, - { url = "https://files.pythonhosted.org/packages/5d/b9/748f6626d534ab7e255bdc39dc22634d337cf3ce200f261b5d65742044a1/matplotlib-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cf4636203e1190871d3a73664dea03d26fb019b66692cbfd642faafdad6208e8", size = 9402111, upload-time = "2025-05-08T19:09:49.474Z" }, - { url = 
"https://files.pythonhosted.org/packages/1f/78/8bf07bd8fb67ea5665a6af188e70b57fcb2ab67057daa06b85a08e59160a/matplotlib-3.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:fd5641a9bb9d55f4dd2afe897a53b537c834b9012684c8444cc105895c8c16fd", size = 8057213, upload-time = "2025-05-08T19:09:51.489Z" }, - { url = "https://files.pythonhosted.org/packages/f5/bd/af9f655456f60fe1d575f54fb14704ee299b16e999704817a7645dfce6b0/matplotlib-3.10.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:0ef061f74cd488586f552d0c336b2f078d43bc00dc473d2c3e7bfee2272f3fa8", size = 8178873, upload-time = "2025-05-08T19:09:53.857Z" }, - { url = "https://files.pythonhosted.org/packages/c2/86/e1c86690610661cd716eda5f9d0b35eaf606ae6c9b6736687cfc8f2d0cd8/matplotlib-3.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d96985d14dc5f4a736bbea4b9de9afaa735f8a0fc2ca75be2fa9e96b2097369d", size = 8052205, upload-time = "2025-05-08T19:09:55.684Z" }, - { url = "https://files.pythonhosted.org/packages/54/51/a9f8e49af3883dacddb2da1af5fca1f7468677f1188936452dd9aaaeb9ed/matplotlib-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c5f0283da91e9522bdba4d6583ed9d5521566f63729ffb68334f86d0bb98049", size = 8465823, upload-time = "2025-05-08T19:09:57.442Z" }, - { url = "https://files.pythonhosted.org/packages/e7/e3/c82963a3b86d6e6d5874cbeaa390166458a7f1961bab9feb14d3d1a10f02/matplotlib-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdfa07c0ec58035242bc8b2c8aae37037c9a886370eef6850703d7583e19964b", size = 8606464, upload-time = "2025-05-08T19:09:59.471Z" }, - { url = "https://files.pythonhosted.org/packages/0e/34/24da1027e7fcdd9e82da3194c470143c551852757a4b473a09a012f5b945/matplotlib-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c0b9849a17bce080a16ebcb80a7b714b5677d0ec32161a2cc0a8e5a6030ae220", size = 9413103, upload-time = "2025-05-08T19:10:03.208Z" }, - { url = 
"https://files.pythonhosted.org/packages/a6/da/948a017c3ea13fd4a97afad5fdebe2f5bbc4d28c0654510ce6fd6b06b7bd/matplotlib-3.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:eef6ed6c03717083bc6d69c2d7ee8624205c29a8e6ea5a31cd3492ecdbaee1e1", size = 8065492, upload-time = "2025-05-08T19:10:05.271Z" }, - { url = "https://files.pythonhosted.org/packages/eb/43/6b80eb47d1071f234ef0c96ca370c2ca621f91c12045f1401b5c9b28a639/matplotlib-3.10.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0ab1affc11d1f495ab9e6362b8174a25afc19c081ba5b0775ef00533a4236eea", size = 8179689, upload-time = "2025-05-08T19:10:07.602Z" }, - { url = "https://files.pythonhosted.org/packages/0f/70/d61a591958325c357204870b5e7b164f93f2a8cca1dc6ce940f563909a13/matplotlib-3.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2a818d8bdcafa7ed2eed74487fdb071c09c1ae24152d403952adad11fa3c65b4", size = 8050466, upload-time = "2025-05-08T19:10:09.383Z" }, - { url = "https://files.pythonhosted.org/packages/e7/75/70c9d2306203148cc7902a961240c5927dd8728afedf35e6a77e105a2985/matplotlib-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:748ebc3470c253e770b17d8b0557f0aa85cf8c63fd52f1a61af5b27ec0b7ffee", size = 8456252, upload-time = "2025-05-08T19:10:11.958Z" }, - { url = "https://files.pythonhosted.org/packages/c4/91/ba0ae1ff4b3f30972ad01cd4a8029e70a0ec3b8ea5be04764b128b66f763/matplotlib-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed70453fd99733293ace1aec568255bc51c6361cb0da94fa5ebf0649fdb2150a", size = 8601321, upload-time = "2025-05-08T19:10:14.47Z" }, - { url = "https://files.pythonhosted.org/packages/d2/88/d636041eb54a84b889e11872d91f7cbf036b3b0e194a70fa064eb8b04f7a/matplotlib-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dbed9917b44070e55640bd13419de83b4c918e52d97561544814ba463811cbc7", size = 9406972, upload-time = "2025-05-08T19:10:16.569Z" }, - { url = 
"https://files.pythonhosted.org/packages/b1/79/0d1c165eac44405a86478082e225fce87874f7198300bbebc55faaf6d28d/matplotlib-3.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:cf37d8c6ef1a48829443e8ba5227b44236d7fcaf7647caa3178a4ff9f7a5be05", size = 8067954, upload-time = "2025-05-08T19:10:18.663Z" }, - { url = "https://files.pythonhosted.org/packages/3d/d1/f54d43e95384b312ffa4a74a4326c722f3b8187aaaa12e9a84cdf3037131/matplotlib-3.10.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:86ab63d66bbc83fdb6733471d3bff40897c1e9921cba112accd748eee4bce5e4", size = 8162896, upload-time = "2025-05-08T19:10:46.432Z" }, - { url = "https://files.pythonhosted.org/packages/24/a4/fbfc00c2346177c95b353dcf9b5a004106abe8730a62cb6f27e79df0a698/matplotlib-3.10.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a48f9c08bf7444b5d2391a83e75edb464ccda3c380384b36532a0962593a1751", size = 8039702, upload-time = "2025-05-08T19:10:49.634Z" }, - { url = "https://files.pythonhosted.org/packages/6a/b9/59e120d24a2ec5fc2d30646adb2efb4621aab3c6d83d66fb2a7a182db032/matplotlib-3.10.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb73d8aa75a237457988f9765e4dfe1c0d2453c5ca4eabc897d4309672c8e014", size = 8594298, upload-time = "2025-05-08T19:10:51.738Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/8a/76/d3c6e3a13fe484ebe7718d14e269c9569c4eb0020a968a327acb3b9a8fe6/matplotlib-3.10.8.tar.gz", hash = "sha256:2299372c19d56bcd35cf05a2738308758d32b9eaed2371898d8f5bd33f084aa3", size = 34806269, upload-time = "2025-12-10T22:56:51.155Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/be/a30bd917018ad220c400169fba298f2bb7003c8ccbc0c3e24ae2aacad1e8/matplotlib-3.10.8-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:00270d217d6b20d14b584c521f810d60c5c78406dc289859776550df837dcda7", size = 8239828, upload-time = "2025-12-10T22:55:02.313Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/27/ca01e043c4841078e82cf6e80a6993dfecd315c3d79f5f3153afbb8e1ec6/matplotlib-3.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37b3c1cc42aa184b3f738cfa18c1c1d72fd496d85467a6cf7b807936d39aa656", size = 8128050, upload-time = "2025-12-10T22:55:04.997Z" }, + { url = "https://files.pythonhosted.org/packages/cb/aa/7ab67f2b729ae6a91bcf9dcac0affb95fb8c56f7fd2b2af894ae0b0cf6fa/matplotlib-3.10.8-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ee40c27c795bda6a5292e9cff9890189d32f7e3a0bf04e0e3c9430c4a00c37df", size = 8700452, upload-time = "2025-12-10T22:55:07.47Z" }, + { url = "https://files.pythonhosted.org/packages/73/ae/2d5817b0acee3c49b7e7ccfbf5b273f284957cc8e270adf36375db353190/matplotlib-3.10.8-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a48f2b74020919552ea25d222d5cc6af9ca3f4eb43a93e14d068457f545c2a17", size = 9534928, upload-time = "2025-12-10T22:55:10.566Z" }, + { url = "https://files.pythonhosted.org/packages/c9/5b/8e66653e9f7c39cb2e5cab25fce4810daffa2bff02cbf5f3077cea9e942c/matplotlib-3.10.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f254d118d14a7f99d616271d6c3c27922c092dac11112670b157798b89bf4933", size = 9586377, upload-time = "2025-12-10T22:55:12.362Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e2/fd0bbadf837f81edb0d208ba8f8cb552874c3b16e27cb91a31977d90875d/matplotlib-3.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:f9b587c9c7274c1613a30afabf65a272114cd6cdbe67b3406f818c79d7ab2e2a", size = 8128127, upload-time = "2025-12-10T22:55:14.436Z" }, + { url = "https://files.pythonhosted.org/packages/f8/86/de7e3a1cdcfc941483af70609edc06b83e7c8a0e0dc9ac325200a3f4d220/matplotlib-3.10.8-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6be43b667360fef5c754dda5d25a32e6307a03c204f3c0fc5468b78fa87b4160", size = 8251215, upload-time = "2025-12-10T22:55:16.175Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/14/baad3222f424b19ce6ad243c71de1ad9ec6b2e4eb1e458a48fdc6d120401/matplotlib-3.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2b336e2d91a3d7006864e0990c83b216fcdca64b5a6484912902cef87313d78", size = 8139625, upload-time = "2025-12-10T22:55:17.712Z" }, + { url = "https://files.pythonhosted.org/packages/8f/a0/7024215e95d456de5883e6732e708d8187d9753a21d32f8ddb3befc0c445/matplotlib-3.10.8-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:efb30e3baaea72ce5928e32bab719ab4770099079d66726a62b11b1ef7273be4", size = 8712614, upload-time = "2025-12-10T22:55:20.8Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f4/b8347351da9a5b3f41e26cf547252d861f685c6867d179a7c9d60ad50189/matplotlib-3.10.8-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d56a1efd5bfd61486c8bc968fa18734464556f0fb8e51690f4ac25d85cbbbbc2", size = 9540997, upload-time = "2025-12-10T22:55:23.258Z" }, + { url = "https://files.pythonhosted.org/packages/9e/c0/c7b914e297efe0bc36917bf216b2acb91044b91e930e878ae12981e461e5/matplotlib-3.10.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:238b7ce5717600615c895050239ec955d91f321c209dd110db988500558e70d6", size = 9596825, upload-time = "2025-12-10T22:55:25.217Z" }, + { url = "https://files.pythonhosted.org/packages/6f/d3/a4bbc01c237ab710a1f22b4da72f4ff6d77eb4c7735ea9811a94ae239067/matplotlib-3.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:18821ace09c763ec93aef5eeff087ee493a24051936d7b9ebcad9662f66501f9", size = 8135090, upload-time = "2025-12-10T22:55:27.162Z" }, + { url = "https://files.pythonhosted.org/packages/89/dd/a0b6588f102beab33ca6f5218b31725216577b2a24172f327eaf6417d5c9/matplotlib-3.10.8-cp311-cp311-win_arm64.whl", hash = "sha256:bab485bcf8b1c7d2060b4fcb6fc368a9e6f4cd754c9c2fea281f4be21df394a2", size = 8012377, upload-time = "2025-12-10T22:55:29.185Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/67/f997cdcbb514012eb0d10cd2b4b332667997fb5ebe26b8d41d04962fa0e6/matplotlib-3.10.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:64fcc24778ca0404ce0cb7b6b77ae1f4c7231cdd60e6778f999ee05cbd581b9a", size = 8260453, upload-time = "2025-12-10T22:55:30.709Z" }, + { url = "https://files.pythonhosted.org/packages/7e/65/07d5f5c7f7c994f12c768708bd2e17a4f01a2b0f44a1c9eccad872433e2e/matplotlib-3.10.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9a5ca4ac220a0cdd1ba6bcba3608547117d30468fefce49bb26f55c1a3d5c58", size = 8148321, upload-time = "2025-12-10T22:55:33.265Z" }, + { url = "https://files.pythonhosted.org/packages/3e/f3/c5195b1ae57ef85339fd7285dfb603b22c8b4e79114bae5f4f0fcf688677/matplotlib-3.10.8-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3ab4aabc72de4ff77b3ec33a6d78a68227bf1123465887f9905ba79184a1cc04", size = 8716944, upload-time = "2025-12-10T22:55:34.922Z" }, + { url = "https://files.pythonhosted.org/packages/00/f9/7638f5cc82ec8a7aa005de48622eecc3ed7c9854b96ba15bd76b7fd27574/matplotlib-3.10.8-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24d50994d8c5816ddc35411e50a86ab05f575e2530c02752e02538122613371f", size = 9550099, upload-time = "2025-12-10T22:55:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/57/61/78cd5920d35b29fd2a0fe894de8adf672ff52939d2e9b43cb83cd5ce1bc7/matplotlib-3.10.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:99eefd13c0dc3b3c1b4d561c1169e65fe47aab7b8158754d7c084088e2329466", size = 9613040, upload-time = "2025-12-10T22:55:38.715Z" }, + { url = "https://files.pythonhosted.org/packages/30/4e/c10f171b6e2f44d9e3a2b96efa38b1677439d79c99357600a62cc1e9594e/matplotlib-3.10.8-cp312-cp312-win_amd64.whl", hash = "sha256:dd80ecb295460a5d9d260df63c43f4afbdd832d725a531f008dad1664f458adf", size = 8142717, upload-time = "2025-12-10T22:55:41.103Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/76/934db220026b5fef85f45d51a738b91dea7d70207581063cd9bd8fafcf74/matplotlib-3.10.8-cp312-cp312-win_arm64.whl", hash = "sha256:3c624e43ed56313651bc18a47f838b60d7b8032ed348911c54906b130b20071b", size = 8012751, upload-time = "2025-12-10T22:55:42.684Z" }, + { url = "https://files.pythonhosted.org/packages/f5/43/31d59500bb950b0d188e149a2e552040528c13d6e3d6e84d0cccac593dcd/matplotlib-3.10.8-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f97aeb209c3d2511443f8797e3e5a569aebb040d4f8bc79aa3ee78a8fb9e3dd8", size = 8237252, upload-time = "2025-12-10T22:56:39.529Z" }, + { url = "https://files.pythonhosted.org/packages/0c/2c/615c09984f3c5f907f51c886538ad785cf72e0e11a3225de2c0f9442aecc/matplotlib-3.10.8-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fb061f596dad3a0f52b60dc6a5dec4a0c300dec41e058a7efe09256188d170b7", size = 8124693, upload-time = "2025-12-10T22:56:41.758Z" }, + { url = "https://files.pythonhosted.org/packages/91/e1/2757277a1c56041e1fc104b51a0f7b9a4afc8eb737865d63cababe30bc61/matplotlib-3.10.8-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:12d90df9183093fcd479f4172ac26b322b1248b15729cb57f42f71f24c7e37a3", size = 8702205, upload-time = "2025-12-10T22:56:43.415Z" }, + { url = "https://files.pythonhosted.org/packages/04/30/3afaa31c757f34b7725ab9d2ba8b48b5e89c2019c003e7d0ead143aabc5a/matplotlib-3.10.8-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6da7c2ce169267d0d066adcf63758f0604aa6c3eebf67458930f9d9b79ad1db1", size = 8249198, upload-time = "2025-12-10T22:56:45.584Z" }, + { url = "https://files.pythonhosted.org/packages/48/2f/6334aec331f57485a642a7c8be03cb286f29111ae71c46c38b363230063c/matplotlib-3.10.8-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9153c3292705be9f9c64498a8872118540c3f4123d1a1c840172edf262c8be4a", size = 8136817, upload-time = "2025-12-10T22:56:47.339Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/e4/6d6f14b2a759c622f191b2d67e9075a3f56aaccb3be4bb9bb6890030d0a0/matplotlib-3.10.8-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ae029229a57cd1e8fe542485f27e7ca7b23aa9e8944ddb4985d0bc444f1eca2", size = 8713867, upload-time = "2025-12-10T22:56:48.954Z" }, ] [[package]] @@ -1624,11 +5534,9 @@ wheels = [ [[package]] name = "mike" -version = "2.1.3" +version = "2.1.4" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "importlib-metadata" }, - { name = "importlib-resources" }, { name = "jinja2" }, { name = "mkdocs" }, { name = "pyparsing" }, @@ -1636,9 +5544,9 @@ dependencies = [ { name = "pyyaml-env-tag" }, { name = "verspec" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ab/f7/2933f1a1fb0e0f077d5d6a92c6c7f8a54e6128241f116dff4df8b6050bbf/mike-2.1.3.tar.gz", hash = "sha256:abd79b8ea483fb0275b7972825d3082e5ae67a41820f8d8a0dc7a3f49944e810", size = 38119, upload-time = "2024-08-13T05:02:14.167Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ec/09/de1cab0018eb5f1fbd9dcc26b6e61f9453c5ec2eb790949d6ed75e1ffe55/mike-2.1.4.tar.gz", hash = "sha256:75d549420b134603805a65fc67f7dcd9fcd0ad1454fb2c893d9e844cba1aa6e4", size = 38190, upload-time = "2026-03-08T02:46:29.187Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/1a/31b7cd6e4e7a02df4e076162e9783620777592bea9e4bb036389389af99d/mike-2.1.3-py3-none-any.whl", hash = "sha256:d90c64077e84f06272437b464735130d380703a76a5738b152932884c60c062a", size = 33754, upload-time = "2024-08-13T05:02:12.515Z" }, + { url = "https://files.pythonhosted.org/packages/48/f7/10f5e101db25741b91e4f4792c5d97b4fa834ead5cf509ae91097d939424/mike-2.1.4-py3-none-any.whl", hash = "sha256:39933e992e155dd70f2297e749a0ed78d8fd7942bc33a3666195d177758a280e", size = 33820, upload-time = "2026-03-08T02:46:28.149Z" }, ] [[package]] @@ -1646,7 +5554,7 @@ name = "mistune" version = "3.2.0" source = { registry = 
"https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "typing-extensions", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/9d/55/d01f0c4b45ade6536c51170b9043db8b2ec6ddf4a35c7ea3f5f559ac935b/mistune-3.2.0.tar.gz", hash = "sha256:708487c8a8cdd99c9d90eb3ed4c3ed961246ff78ac82f03418f5183ab70e398a", size = 95467, upload-time = "2025-12-23T11:36:34.994Z" } wheels = [ @@ -1659,7 +5567,7 @@ version = "1.6.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, - { name = "colorama", marker = "sys_platform == 'win32' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "colorama", marker = "sys_platform == 'win32' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') 
or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "ghp-import" }, { name = "jinja2" }, { name = "markdown" }, @@ -1679,42 +5587,43 @@ wheels = [ [[package]] name = "mkdocs-autorefs" -version = "1.4.3" +version = "1.4.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = 
"markdown" }, { name = "markupsafe" }, { name = "mkdocs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/51/fa/9124cd63d822e2bcbea1450ae68cdc3faf3655c69b455f3a7ed36ce6c628/mkdocs_autorefs-1.4.3.tar.gz", hash = "sha256:beee715b254455c4aa93b6ef3c67579c399ca092259cc41b7d9342573ff1fc75", size = 55425, upload-time = "2025-08-26T14:23:17.223Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/c0/f641843de3f612a6b48253f39244165acff36657a91cc903633d456ae1ac/mkdocs_autorefs-1.4.4.tar.gz", hash = "sha256:d54a284f27a7346b9c38f1f852177940c222da508e66edc816a0fa55fc6da197", size = 56588, upload-time = "2026-02-10T15:23:55.105Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/4d/7123b6fa2278000688ebd338e2a06d16870aaf9eceae6ba047ea05f92df1/mkdocs_autorefs-1.4.3-py3-none-any.whl", hash = "sha256:469d85eb3114801d08e9cc55d102b3ba65917a869b893403b8987b601cf55dc9", size = 25034, upload-time = "2025-08-26T14:23:15.906Z" }, + { url = "https://files.pythonhosted.org/packages/28/de/a3e710469772c6a89595fc52816da05c1e164b4c866a89e3cb82fb1b67c5/mkdocs_autorefs-1.4.4-py3-none-any.whl", hash = "sha256:834ef5408d827071ad1bc69e0f39704fa34c7fc05bc8e1c72b227dfdc5c76089", size = 25530, upload-time = "2026-02-10T15:23:53.817Z" }, ] [[package]] name = "mkdocs-gen-files" -version = "0.6.0" +version = "0.6.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mkdocs" }, + { name = "properdocs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/61/35/f26349f7fa18414eb2e25d75a6fa9c7e3186c36e1d227c0b2d785a7bd5c4/mkdocs_gen_files-0.6.0.tar.gz", hash = "sha256:52022dc14dcc0451e05e54a8f5d5e7760351b6701eff816d1e9739577ec5635e", size = 8642, upload-time = "2025-11-23T12:13:22.124Z" } +sdist = { url = "https://files.pythonhosted.org/packages/43/43/428f312149c161cae557eecd35f3c4a82b867998b1d47fb29fdfe927be26/mkdocs_gen_files-0.6.1.tar.gz", hash = "sha256:57d7ff2229e23d077e46d14a33db6d37c8823f6ce1a503c874c1764a71679763", size = 
8746, upload-time = "2026-03-16T23:26:09.31Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/ec/72417415563c60ae01b36f0d497f1f4c803972f447ef4fb7f7746d6e07db/mkdocs_gen_files-0.6.0-py3-none-any.whl", hash = "sha256:815af15f3e2dbfda379629c1b95c02c8e6f232edf2a901186ea3b204ab1135b2", size = 8182, upload-time = "2025-11-23T12:13:20.756Z" }, + { url = "https://files.pythonhosted.org/packages/ee/1b/3075eb67fe66e19db059f0a25744c4e56978a309603a20e1d3353d545b5e/mkdocs_gen_files-0.6.1-py3-none-any.whl", hash = "sha256:b3182bfc6219e35b8d26658cb988368659d5d023aac30c2a819247558fc12189", size = 8282, upload-time = "2026-03-16T23:26:08.292Z" }, ] [[package]] name = "mkdocs-get-deps" -version = "0.2.0" +version = "0.2.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mergedeep" }, { name = "platformdirs" }, { name = "pyyaml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/98/f5/ed29cd50067784976f25ed0ed6fcd3c2ce9eb90650aa3b2796ddf7b6870b/mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c", size = 10239, upload-time = "2023-11-20T17:51:09.981Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/25/b3cccb187655b9393572bde9b09261d267c3bf2f2cdabe347673be5976a6/mkdocs_get_deps-0.2.2.tar.gz", hash = "sha256:8ee8d5f316cdbbb2834bc1df6e69c08fe769a83e040060de26d3c19fad3599a1", size = 11047, upload-time = "2026-03-10T02:46:33.632Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/d4/029f984e8d3f3b6b726bd33cafc473b75e9e44c0f7e80a5b29abc466bdea/mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134", size = 9521, upload-time = "2023-11-20T17:51:08.587Z" }, + { url = "https://files.pythonhosted.org/packages/88/29/744136411e785c4b0b744d5413e56555265939ab3a104c6a4b719dad33fd/mkdocs_get_deps-0.2.2-py3-none-any.whl", hash = 
"sha256:e7878cbeac04860b8b5e0ca31d3abad3df9411a75a32cde82f8e44b6c16ff650", size = 9555, upload-time = "2026-03-10T02:46:32.256Z" }, ] [[package]] @@ -1733,22 +5642,22 @@ wheels = [ [[package]] name = "mkdocs-git-revision-date-localized-plugin" -version = "1.5.0" +version = "1.5.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "babel" }, { name = "gitpython" }, { name = "mkdocs" }, - { name = "tzdata", marker = "sys_platform == 'win32' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "tzdata", marker = "sys_platform == 'win32' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0f/c5/1d3c4e6ddae6230b89d09105cb79de711655e3ebd6745f7a92efea0f5160/mkdocs_git_revision_date_localized_plugin-1.5.0.tar.gz", hash = "sha256:17345ccfdf69a1905dc96fb1070dce82d03a1eb6b0d48f958081a7589ce3c248", size = 460697, upload-time = "2025-10-31T16:11:34.44Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/16/25d7b1b930a802bf8b0c6ee64a9b34ea6e7d0a34c6bc69adbbb59b9d2f4b/mkdocs_git_revision_date_localized_plugin-1.5.1.tar.gz", hash = "sha256:2b0239455cd84784dd87ac8dfc9253fe4b2dd35e102696f21b5d34e2175981c6", size = 449557, upload-time = "2026-01-26T13:34:30.912Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/51/fe0e3fdb16f6eed65c9459d12bae6a4e1f0bb4e2228cb037e7907b002678/mkdocs_git_revision_date_localized_plugin-1.5.0-py3-none-any.whl", hash = "sha256:933f9e35a8c135b113f21bb57610d82e9b7bcc71dd34fb06a029053c97e99656", 
size = 26153, upload-time = "2025-10-31T16:11:32.987Z" }, + { url = "https://files.pythonhosted.org/packages/4b/3f/4f663fb7e889fbb2fabef7a67ddd96f8355edca917aa724c6c6cda352d01/mkdocs_git_revision_date_localized_plugin-1.5.1-py3-none-any.whl", hash = "sha256:b00fd36ed0f9b2326b1488fd8fa31bf2ce64e68c4aa60a9ce857f10719571903", size = 26150, upload-time = "2026-01-26T13:34:28.768Z" }, ] [[package]] name = "mkdocs-jupyter" -version = "0.25.1" +version = "0.26.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ipykernel" }, @@ -1758,26 +5667,27 @@ dependencies = [ { name = "nbconvert" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6c/23/6ffb8d2fd2117aa860a04c6fe2510b21bc3c3c085907ffdd851caba53152/mkdocs_jupyter-0.25.1.tar.gz", hash = "sha256:0e9272ff4947e0ec683c92423a4bfb42a26477c103ab1a6ab8277e2dcc8f7afe", size = 1626747, upload-time = "2024-10-15T14:56:32.373Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7f/d8/c146ea8cc36c3e812dd4c154513aa308614f35d2b4becec4b449165088f5/mkdocs_jupyter-0.26.1.tar.gz", hash = "sha256:7c80c0d3953de91e5b40a0d3209233795c8f800243ab298e4ec38e0504eda630", size = 1628270, upload-time = "2026-03-24T15:32:47.944Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/08/37/5f1fd5c3f6954b3256f8126275e62af493b96fb6aef6c0dbc4ee326032ad/mkdocs_jupyter-0.25.1-py3-none-any.whl", hash = "sha256:3f679a857609885d322880e72533ef5255561bbfdb13cfee2a1e92ef4d4ad8d8", size = 1456197, upload-time = "2024-10-15T14:56:29.854Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/eb601278b12c471235860992f5973cf3c55ca3f77d1d6127389eb045a021/mkdocs_jupyter-0.26.1-py3-none-any.whl", hash = "sha256:527242c2c8f1d30970764bbab752de41243e5703f458d8bc05336ec53828192e", size = 1459618, upload-time = "2026-03-24T15:32:46.25Z" }, ] [[package]] name = "mkdocs-literate-nav" -version = "0.6.2" +version = "0.6.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { 
name = "mkdocs" }, + { name = "properdocs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f6/5f/99aa379b305cd1c2084d42db3d26f6de0ea9bf2cc1d10ed17f61aff35b9a/mkdocs_literate_nav-0.6.2.tar.gz", hash = "sha256:760e1708aa4be86af81a2b56e82c739d5a8388a0eab1517ecfd8e5aa40810a75", size = 17419, upload-time = "2025-03-18T21:53:09.711Z" } +sdist = { url = "https://files.pythonhosted.org/packages/01/af/dd3776a7a713f798f79bec7eb9c661d5cfb83ddc17d9a3667595e53e1559/mkdocs_literate_nav-0.6.3.tar.gz", hash = "sha256:edbaca22343f861fe4e34aac47d55a0c9955c640dbf02eea99fe631e914cf9ee", size = 17526, upload-time = "2026-03-16T23:26:50.688Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/84/b5b14d2745e4dd1a90115186284e9ee1b4d0863104011ab46abb7355a1c3/mkdocs_literate_nav-0.6.2-py3-none-any.whl", hash = "sha256:0a6489a26ec7598477b56fa112056a5e3a6c15729f0214bea8a4dbc55bd5f630", size = 13261, upload-time = "2025-03-18T21:53:08.1Z" }, + { url = "https://files.pythonhosted.org/packages/4e/2c/bcf1ae903975ad6f169abb05c1eb0f94395478364deb89270cf034081b29/mkdocs_literate_nav-0.6.3-py3-none-any.whl", hash = "sha256:2c421561280fa9184f88cbf399bebbd4cc17ee507e978a31ce11fd6f3aabf233", size = 13355, upload-time = "2026-03-16T23:26:49.562Z" }, ] [[package]] name = "mkdocs-material" -version = "9.7.1" +version = "9.7.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "babel" }, @@ -1792,9 +5702,9 @@ dependencies = [ { name = "pymdown-extensions" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/27/e2/2ffc356cd72f1473d07c7719d82a8f2cbd261666828614ecb95b12169f41/mkdocs_material-9.7.1.tar.gz", hash = "sha256:89601b8f2c3e6c6ee0a918cc3566cb201d40bf37c3cd3c2067e26fadb8cce2b8", size = 4094392, upload-time = "2025-12-18T09:49:00.308Z" } +sdist = { url = "https://files.pythonhosted.org/packages/45/29/6d2bcf41ae40802c4beda2432396fff97b8456fb496371d1bc7aad6512ec/mkdocs_material-9.7.6.tar.gz", hash = 
"sha256:00bdde50574f776d328b1862fe65daeaf581ec309bd150f7bff345a098c64a69", size = 4097959, upload-time = "2026-03-19T15:41:58.161Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/32/ed071cb721aca8c227718cffcf7bd539620e9799bbf2619e90c757bfd030/mkdocs_material-9.7.1-py3-none-any.whl", hash = "sha256:3f6100937d7d731f87f1e3e3b021c97f7239666b9ba1151ab476cabb96c60d5c", size = 9297166, upload-time = "2025-12-18T09:48:56.664Z" }, + { url = "https://files.pythonhosted.org/packages/2c/01/bc663630c510822c95c47a66af9fa7a443c295b47d5f041e5e6ae62ef659/mkdocs_material-9.7.6-py3-none-any.whl", hash = "sha256:71b84353921b8ea1ba84fe11c50912cc512da8fe0881038fcc9a0761c0e635ba", size = 9305470, upload-time = "2026-03-19T15:41:55.217Z" }, ] [package.optional-dependencies] @@ -1837,7 +5747,7 @@ dependencies = [ { name = "griffe" }, { name = "mkdocs-autorefs" }, { name = "mkdocstrings" }, - { name = "typing-extensions", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "typing-extensions", marker = "python_full_version < '3.11' or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/75/1c/3af8413919b0839b96a78f60e8bd0dfd26c844d3717eeb77f80b43f5be1c/mkdocstrings_python-1.19.0.tar.gz", hash = "sha256:917aac66cf121243c11db5b89f66b0ded6c53ec0de5318ff5e22424eb2f2e57c", size = 204010, upload-time = "2025-11-10T13:30:55.915Z" } wheels = [ @@ -1846,25 +5756,64 @@ wheels = [ [[package]] name = "ml-dtypes" -version = "0.5.1" +version = "0.5.4" source = { 
registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra 
== 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') 
or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and 
extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') 
or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0e/4a/c27b42ed9b1c7d13d9ba8b6905dece787d6259152f2309338aed29b2447b/ml_dtypes-0.5.4.tar.gz", hash = 
"sha256:8ab06a50fb9bf9666dd0fe5dfb4676fa2b0ac0f31ecff72a6c3af8e22c063453", size = 692314, upload-time = "2025-11-17T22:32:31.031Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/3a/c5b855752a70267ff729c349e650263adb3c206c29d28cc8ea7ace30a1d5/ml_dtypes-0.5.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b95e97e470fe60ed493fd9ae3911d8da4ebac16bd21f87ffa2b7c588bf22ea2c", size = 679735, upload-time = "2025-11-17T22:31:31.367Z" }, + { url = "https://files.pythonhosted.org/packages/41/79/7433f30ee04bd4faa303844048f55e1eb939131c8e5195a00a96a0939b64/ml_dtypes-0.5.4-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4b801ebe0b477be666696bda493a9be8356f1f0057a57f1e35cd26928823e5a", size = 5051883, upload-time = "2025-11-17T22:31:33.658Z" }, + { url = "https://files.pythonhosted.org/packages/10/b1/8938e8830b0ee2e167fc75a094dea766a1152bde46752cd9bfc57ee78a82/ml_dtypes-0.5.4-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:388d399a2152dd79a3f0456a952284a99ee5c93d3e2f8dfe25977511e0515270", size = 5030369, upload-time = "2025-11-17T22:31:35.595Z" }, + { url = "https://files.pythonhosted.org/packages/c7/a3/51886727bd16e2f47587997b802dd56398692ce8c6c03c2e5bb32ecafe26/ml_dtypes-0.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:4ff7f3e7ca2972e7de850e7b8fcbb355304271e2933dd90814c1cb847414d6e2", size = 210738, upload-time = "2025-11-17T22:31:37.43Z" }, + { url = "https://files.pythonhosted.org/packages/c6/5e/712092cfe7e5eb667b8ad9ca7c54442f21ed7ca8979745f1000e24cf8737/ml_dtypes-0.5.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6c7ecb74c4bd71db68a6bea1edf8da8c34f3d9fe218f038814fd1d310ac76c90", size = 679734, upload-time = "2025-11-17T22:31:39.223Z" }, + { url = "https://files.pythonhosted.org/packages/4f/cf/912146dfd4b5c0eea956836c01dcd2fce6c9c844b2691f5152aca196ce4f/ml_dtypes-0.5.4-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:bc11d7e8c44a65115d05e2ab9989d1e045125d7be8e05a071a48bc76eb6d6040", size = 5056165, upload-time = "2025-11-17T22:31:41.071Z" }, + { url = "https://files.pythonhosted.org/packages/a9/80/19189ea605017473660e43762dc853d2797984b3c7bf30ce656099add30c/ml_dtypes-0.5.4-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:19b9a53598f21e453ea2fbda8aa783c20faff8e1eeb0d7ab899309a0053f1483", size = 5034975, upload-time = "2025-11-17T22:31:42.758Z" }, + { url = "https://files.pythonhosted.org/packages/b4/24/70bd59276883fdd91600ca20040b41efd4902a923283c4d6edcb1de128d2/ml_dtypes-0.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:7c23c54a00ae43edf48d44066a7ec31e05fdc2eee0be2b8b50dd1903a1db94bb", size = 210742, upload-time = "2025-11-17T22:31:44.068Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c9/64230ef14e40aa3f1cb254ef623bf812735e6bec7772848d19131111ac0d/ml_dtypes-0.5.4-cp311-cp311-win_arm64.whl", hash = "sha256:557a31a390b7e9439056644cb80ed0735a6e3e3bb09d67fd5687e4b04238d1de", size = 160709, upload-time = "2025-11-17T22:31:46.557Z" }, + { url = "https://files.pythonhosted.org/packages/a8/b8/3c70881695e056f8a32f8b941126cf78775d9a4d7feba8abcb52cb7b04f2/ml_dtypes-0.5.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a174837a64f5b16cab6f368171a1a03a27936b31699d167684073ff1c4237dac", size = 676927, upload-time = "2025-11-17T22:31:48.182Z" }, + { url = "https://files.pythonhosted.org/packages/54/0f/428ef6881782e5ebb7eca459689448c0394fa0a80bea3aa9262cba5445ea/ml_dtypes-0.5.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a7f7c643e8b1320fd958bf098aa7ecf70623a42ec5154e3be3be673f4c34d900", size = 5028464, upload-time = "2025-11-17T22:31:50.135Z" }, + { url = "https://files.pythonhosted.org/packages/3a/cb/28ce52eb94390dda42599c98ea0204d74799e4d8047a0eb559b6fd648056/ml_dtypes-0.5.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:9ad459e99793fa6e13bd5b7e6792c8f9190b4e5a1b45c63aba14a4d0a7f1d5ff", size = 5009002, upload-time = "2025-11-17T22:31:52.001Z" }, + { url = "https://files.pythonhosted.org/packages/f5/f0/0cfadd537c5470378b1b32bd859cf2824972174b51b873c9d95cfd7475a5/ml_dtypes-0.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:c1a953995cccb9e25a4ae19e34316671e4e2edaebe4cf538229b1fc7109087b7", size = 212222, upload-time = "2025-11-17T22:31:53.742Z" }, + { url = "https://files.pythonhosted.org/packages/16/2e/9acc86985bfad8f2c2d30291b27cd2bb4c74cea08695bd540906ed744249/ml_dtypes-0.5.4-cp312-cp312-win_arm64.whl", hash = "sha256:9bad06436568442575beb2d03389aa7456c690a5b05892c471215bfd8cf39460", size = 160793, upload-time = "2025-11-17T22:31:55.358Z" }, +] + +[[package]] +name = "mlx" +version = "0.31.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mlx-metal", marker = "sys_platform == 'darwin' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/32/49/6e67c334872d2c114df3020e579f3718c333198f8312290e09ec0216703a/ml_dtypes-0.5.1.tar.gz", hash = "sha256:ac5b58559bb84a95848ed6984eb8013249f90b6bab62aa5acbad876e256002c9", size = 698772, upload-time = "2025-01-07T03:34:55.613Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/88/11ebdbc75445eeb5b6869b708a0d787d1ed812ff86c2170bbfb95febdce1/ml_dtypes-0.5.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bd73f51957949069573ff783563486339a9285d72e2f36c18e0c1aa9ca7eb190", size = 671450, upload-time = "2025-01-07T03:33:52.724Z" }, - { url = 
"https://files.pythonhosted.org/packages/a4/a4/9321cae435d6140f9b0e7af8334456a854b60e3a9c6101280a16e3594965/ml_dtypes-0.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:810512e2eccdfc3b41eefa3a27402371a3411453a1efc7e9c000318196140fed", size = 4621075, upload-time = "2025-01-07T03:33:54.878Z" }, - { url = "https://files.pythonhosted.org/packages/16/d8/4502e12c6a10d42e13a552e8d97f20198e3cf82a0d1411ad50be56a5077c/ml_dtypes-0.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141b2ea2f20bb10802ddca55d91fe21231ef49715cfc971998e8f2a9838f3dbe", size = 4738414, upload-time = "2025-01-07T03:33:57.709Z" }, - { url = "https://files.pythonhosted.org/packages/6b/7e/bc54ae885e4d702e60a4bf50aa9066ff35e9c66b5213d11091f6bffb3036/ml_dtypes-0.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:26ebcc69d7b779c8f129393e99732961b5cc33fcff84090451f448c89b0e01b4", size = 209718, upload-time = "2025-01-07T03:34:00.585Z" }, - { url = "https://files.pythonhosted.org/packages/c9/fd/691335926126bb9beeb030b61a28f462773dcf16b8e8a2253b599013a303/ml_dtypes-0.5.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:023ce2f502efd4d6c1e0472cc58ce3640d051d40e71e27386bed33901e201327", size = 671448, upload-time = "2025-01-07T03:34:03.153Z" }, - { url = "https://files.pythonhosted.org/packages/ff/a6/63832d91f2feb250d865d069ba1a5d0c686b1f308d1c74ce9764472c5e22/ml_dtypes-0.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7000b6e4d8ef07542c05044ec5d8bbae1df083b3f56822c3da63993a113e716f", size = 4625792, upload-time = "2025-01-07T03:34:04.981Z" }, - { url = "https://files.pythonhosted.org/packages/cc/2a/5421fd3dbe6eef9b844cc9d05f568b9fb568503a2e51cb1eb4443d9fc56b/ml_dtypes-0.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c09526488c3a9e8b7a23a388d4974b670a9a3dd40c5c8a61db5593ce9b725bab", size = 4743893, upload-time = "2025-01-07T03:34:08.333Z" }, - { url = 
"https://files.pythonhosted.org/packages/60/30/d3f0fc9499a22801219679a7f3f8d59f1429943c6261f445fb4bfce20718/ml_dtypes-0.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:15ad0f3b0323ce96c24637a88a6f44f6713c64032f27277b069f285c3cf66478", size = 209712, upload-time = "2025-01-07T03:34:12.182Z" }, - { url = "https://files.pythonhosted.org/packages/47/56/1bb21218e1e692506c220ffabd456af9733fba7aa1b14f73899979f4cc20/ml_dtypes-0.5.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:6f462f5eca22fb66d7ff9c4744a3db4463af06c49816c4b6ac89b16bfcdc592e", size = 670372, upload-time = "2025-01-07T03:34:15.258Z" }, - { url = "https://files.pythonhosted.org/packages/20/95/d8bd96a3b60e00bf31bd78ca4bdd2d6bbaf5acb09b42844432d719d34061/ml_dtypes-0.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f76232163b5b9c34291b54621ee60417601e2e4802a188a0ea7157cd9b323f4", size = 4635946, upload-time = "2025-01-07T03:34:20.412Z" }, - { url = "https://files.pythonhosted.org/packages/08/57/5d58fad4124192b1be42f68bd0c0ddaa26e44a730ff8c9337adade2f5632/ml_dtypes-0.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4953c5eb9c25a56d11a913c2011d7e580a435ef5145f804d98efa14477d390", size = 4694804, upload-time = "2025-01-07T03:34:23.608Z" }, - { url = "https://files.pythonhosted.org/packages/38/bc/c4260e4a6c6bf684d0313308de1c860467275221d5e7daf69b3fcddfdd0b/ml_dtypes-0.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:9626d0bca1fb387d5791ca36bacbba298c5ef554747b7ebeafefb4564fc83566", size = 210853, upload-time = "2025-01-07T03:34:26.027Z" }, + { url = "https://files.pythonhosted.org/packages/9b/f9/f1663dafd45af02467f4f41777c13ec34b9104b2b0450d870c3f906285cd/mlx-0.31.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:bc46c911cc060d2eaf21b9e24a1712dc56763b660b53631b9057a32ab1c0271a", size = 574137, upload-time = "2026-03-12T02:15:54.996Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/26/1fd632f537a5160a21475a70aaef252090c62f9629f45ad20f5acfe810f3/mlx-0.31.1-cp310-cp310-macosx_15_0_arm64.whl", hash = "sha256:fa132def5b3d959362077521c80f1fc80f64c45060d2940dc1d66a1aa19ce5f6", size = 574140, upload-time = "2026-03-12T02:15:56.709Z" }, + { url = "https://files.pythonhosted.org/packages/5c/c9/e790fa8ddc1b27fea7ba749699883f31c65e166b18e4598beab4574e4686/mlx-0.31.1-cp310-cp310-macosx_26_0_arm64.whl", hash = "sha256:877ff2f98debd035b922825a0d7e7e1be0959fc5ca1d24cb5020a23e510ff16d", size = 574124, upload-time = "2026-03-12T02:15:58.323Z" }, + { url = "https://files.pythonhosted.org/packages/b4/da/f7375fc2be05d026640c5ced085a9e71066a33100638e5762347dae5d680/mlx-0.31.1-cp310-cp310-manylinux_2_35_aarch64.whl", hash = "sha256:931c9316ec47b45ec0e737519f4f4c90eb69cbbdaaecadd6dd2ccdf1a85d4e61", size = 641428, upload-time = "2026-03-12T02:15:59.743Z" }, + { url = "https://files.pythonhosted.org/packages/1c/3f/ab060661d966d435e41212d4f6d6e9d1202da8b9043b1c18c343ab7d1b08/mlx-0.31.1-cp310-cp310-manylinux_2_35_x86_64.whl", hash = "sha256:dec00ce7b094d6bc2876996291fd76c9e28326bc1a9853440903f2a06946ce1f", size = 674521, upload-time = "2026-03-12T02:16:01.057Z" }, + { url = "https://files.pythonhosted.org/packages/75/32/25dc2eae1d6f867224ef2bca2c644e3e913fe8067991f8394c090b720e3e/mlx-0.31.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:8863835fb36c7c4f65008b1426ddb9ff7931a13c975e0ef58a40002ae8048922", size = 574311, upload-time = "2026-03-12T02:16:02.651Z" }, + { url = "https://files.pythonhosted.org/packages/9b/bf/c5aa1d1154f5a216139c8162cd3e6568b7eb427390d655f7f5ae3a1a61e7/mlx-0.31.1-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:0de504c1f1fe73b32fc3cf457b8eac30d1f7ce22440ef075c1970f96712e6fff", size = 574312, upload-time = "2026-03-12T02:16:04.231Z" }, + { url = "https://files.pythonhosted.org/packages/3a/88/ef57747552c9e9da0c28465d9266c05a0009b698d90fb0bc63eb81840b8d/mlx-0.31.1-cp311-cp311-macosx_26_0_arm64.whl", 
hash = "sha256:10715b895e1f3e984c2c54257b7db956ff8af1fa93255412794a3724fe2dd3b1", size = 574385, upload-time = "2026-03-12T02:16:05.528Z" }, + { url = "https://files.pythonhosted.org/packages/ac/51/dbea4bbe7a2e4cd05226965b34198d49459cfaef8b9b37b72f006a9811ab/mlx-0.31.1-cp311-cp311-manylinux_2_35_aarch64.whl", hash = "sha256:d065625ab3101adcd7f5824297243fe40a0615099a06f5597ab67284483aa2f8", size = 641347, upload-time = "2026-03-12T02:16:07.013Z" }, + { url = "https://files.pythonhosted.org/packages/c5/86/3db98e8805637fb56f078311d622e9500f5c9088f6d79a6e304ec8235b47/mlx-0.31.1-cp311-cp311-manylinux_2_35_x86_64.whl", hash = "sha256:b2cf8502d9d64dc6851034fcd4a656cbb26be20c36f190f2971f4ac0caed89cb", size = 674769, upload-time = "2026-03-12T02:16:08.51Z" }, + { url = "https://files.pythonhosted.org/packages/38/29/71fe1f68756f515856e6930973c23245810d4aa3cd22fddd719d86a709dc/mlx-0.31.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8a63b31a398c9519f2bb0c81cf3865d9baca4ff573ffc31ead465d18286184e8", size = 574308, upload-time = "2026-03-12T02:16:10.256Z" }, + { url = "https://files.pythonhosted.org/packages/21/be/70654a2cee0d71fd10bd237a50a79d06ae51679a194db6a3b16c0c84e6a5/mlx-0.31.1-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:a7a9347df4dcc41f0d16ff70b65650820af4879f686534b233b16826a22afa00", size = 574309, upload-time = "2026-03-12T02:16:11.577Z" }, + { url = "https://files.pythonhosted.org/packages/ad/69/c7bc7b04f76b0cbd678f328011d1634bd0bcfc2da45aba06e084cb031127/mlx-0.31.1-cp312-cp312-macosx_26_0_arm64.whl", hash = "sha256:6cdb797ea31787d1ce9e5be77991c4bd5cbf129ab15f7253b78e09737f535fce", size = 574289, upload-time = "2026-03-12T02:16:13.146Z" }, + { url = "https://files.pythonhosted.org/packages/55/f7/dcc129228faab4d406041d91413c5999250ab79da6fe5417ac84f1616ff1/mlx-0.31.1-cp312-cp312-manylinux_2_35_aarch64.whl", hash = "sha256:1ed1991c8e39f841d5756c0c543beb819763a2f80fba3f4b150bc6cad4d973de", size = 626439, upload-time = "2026-03-12T02:16:14.741Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/1d/8b32e46ea98ab5c1c15cf1b37ac97af651977f84e72e1800412a700c51d9/mlx-0.31.1-cp312-cp312-manylinux_2_35_x86_64.whl", hash = "sha256:195c5cb27328380287c0ffe9ef48f860ab75ec5d3dfce153d475dc2c99369708", size = 668679, upload-time = "2026-03-12T02:16:16.012Z" }, +] + +[[package]] +name = "mlx-metal" +version = "0.31.1" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/66/2313497fdbc7fbadf8e026c09366e3f049f9114e65ca4edc23cdb8699186/mlx_metal-0.31.1-py3-none-macosx_14_0_arm64.whl", hash = "sha256:70741174131dbf7fdd479cb730e06e08c358eac3bf7905d9e884e7960cfdd5b8", size = 38624074, upload-time = "2026-03-12T02:15:48.036Z" }, + { url = "https://files.pythonhosted.org/packages/c7/34/4c3c6890ce6095b2ab2ba2f5f15c9a7ba17208d47f8cacb572885a2dc0eb/mlx_metal-0.31.1-py3-none-macosx_15_0_arm64.whl", hash = "sha256:6c56bd8cd27743e635f5a90a22535af7c31bd22b4b126d46b6da2da52d72e413", size = 38618950, upload-time = "2026-03-12T02:15:51.908Z" }, + { url = "https://files.pythonhosted.org/packages/51/bc/987cb99e3aafb296aa11ce5133838a10eae8447edd53168d0804d4fb3a14/mlx_metal-0.31.1-py3-none-macosx_26_0_arm64.whl", hash = "sha256:e7324b7c56b519ae67c025d3ced07e5d35bc3a9f19d4c45fe4927f385148c59e", size = 49256543, upload-time = "2026-03-12T02:15:54.851Z" }, ] [[package]] @@ -1876,6 +5825,93 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" }, ] +[[package]] +name = "multidict" +version = "6.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra 
== 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1a/c2/c2d94cbe6ac1753f3fc980da97b3d930efe1da3af3c9f5125354436c073d/multidict-6.7.1.tar.gz", hash = "sha256:ec6652a1bee61c53a3e5776b6049172c53b6aaba34f18c9ad04f82712bac623d", size = 102010, upload-time = "2026-01-26T02:46:45.979Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/0b/19348d4c98980c4851d2f943f8ebafdece2ae7ef737adcfa5994ce8e5f10/multidict-6.7.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c93c3db7ea657dd4637d57e74ab73de31bccefe144d3d4ce370052035bc85fb5", size = 77176, upload-time = "2026-01-26T02:42:59.784Z" }, + { url = "https://files.pythonhosted.org/packages/ef/04/9de3f8077852e3d438215c81e9b691244532d2e05b4270e89ce67b7d103c/multidict-6.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:974e72a2474600827abaeda71af0c53d9ebbc3c2eb7da37b37d7829ae31232d8", size = 44996, upload-time = "2026-01-26T02:43:01.674Z" }, + { url = "https://files.pythonhosted.org/packages/31/5c/08c7f7fe311f32e83f7621cd3f99d805f45519cd06fafb247628b861da7d/multidict-6.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdea2e7b2456cfb6694fb113066fd0ec7ea4d67e3a35e1f4cbeea0b448bf5872", size = 44631, upload-time = "2026-01-26T02:43:03.169Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7f/0e3b1390ae772f27501199996b94b52ceeb64fe6f9120a32c6c3f6b781be/multidict-6.7.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:17207077e29342fdc2c9a82e4b306f1127bf1ea91f8b71e02d4798a70bb99991", size = 242561, upload-time = "2026-01-26T02:43:04.733Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/f4/8719f4f167586af317b69dd3e90f913416c91ca610cac79a45c53f590312/multidict-6.7.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d4f49cb5661344764e4c7c7973e92a47a59b8fc19b6523649ec9dc4960e58a03", size = 242223, upload-time = "2026-01-26T02:43:06.695Z" }, + { url = "https://files.pythonhosted.org/packages/47/ab/7c36164cce64a6ad19c6d9a85377b7178ecf3b89f8fd589c73381a5eedfd/multidict-6.7.1-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a9fc4caa29e2e6ae408d1c450ac8bf19892c5fca83ee634ecd88a53332c59981", size = 222322, upload-time = "2026-01-26T02:43:08.472Z" }, + { url = "https://files.pythonhosted.org/packages/f5/79/a25add6fb38035b5337bc5734f296d9afc99163403bbcf56d4170f97eb62/multidict-6.7.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c5f0c21549ab432b57dcc82130f388d84ad8179824cc3f223d5e7cfbfd4143f6", size = 254005, upload-time = "2026-01-26T02:43:10.127Z" }, + { url = "https://files.pythonhosted.org/packages/4a/7b/64a87cf98e12f756fc8bd444b001232ffff2be37288f018ad0d3f0aae931/multidict-6.7.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7dfb78d966b2c906ae1d28ccf6e6712a3cd04407ee5088cd276fe8cb42186190", size = 251173, upload-time = "2026-01-26T02:43:11.731Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ac/b605473de2bb404e742f2cc3583d12aedb2352a70e49ae8fce455b50c5aa/multidict-6.7.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9b0d9b91d1aa44db9c1f1ecd0d9d2ae610b2f4f856448664e01a3b35899f3f92", size = 243273, upload-time = "2026-01-26T02:43:13.063Z" }, + { url = "https://files.pythonhosted.org/packages/03/65/11492d6a0e259783720f3bc1d9ea55579a76f1407e31ed44045c99542004/multidict-6.7.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:dd96c01a9dcd4889dcfcf9eb5544ca0c77603f239e3ffab0524ec17aea9a93ee", size = 238956, upload-time = "2026-01-26T02:43:14.843Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a7/7ee591302af64e7c196fb63fe856c788993c1372df765102bd0448e7e165/multidict-6.7.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:067343c68cd6612d375710f895337b3a98a033c94f14b9a99eff902f205424e2", size = 233477, upload-time = "2026-01-26T02:43:16.025Z" }, + { url = "https://files.pythonhosted.org/packages/9c/99/c109962d58756c35fd9992fed7f2355303846ea2ff054bb5f5e9d6b888de/multidict-6.7.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5884a04f4ff56c6120f6ccf703bdeb8b5079d808ba604d4d53aec0d55dc33568", size = 243615, upload-time = "2026-01-26T02:43:17.84Z" }, + { url = "https://files.pythonhosted.org/packages/d5/5f/1973e7c771c86e93dcfe1c9cc55a5481b610f6614acfc28c0d326fe6bfad/multidict-6.7.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8affcf1c98b82bc901702eb73b6947a1bfa170823c153fe8a47b5f5f02e48e40", size = 249930, upload-time = "2026-01-26T02:43:19.06Z" }, + { url = "https://files.pythonhosted.org/packages/5d/a5/f170fc2268c3243853580203378cd522446b2df632061e0a5409817854c7/multidict-6.7.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:0d17522c37d03e85c8098ec8431636309b2682cf12e58f4dbc76121fb50e4962", size = 243807, upload-time = "2026-01-26T02:43:20.286Z" }, + { url = "https://files.pythonhosted.org/packages/de/01/73856fab6d125e5bc652c3986b90e8699a95e84b48d72f39ade6c0e74a8c/multidict-6.7.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24c0cf81544ca5e17cfcb6e482e7a82cd475925242b308b890c9452a074d4505", size = 239103, upload-time = "2026-01-26T02:43:21.508Z" }, + { url = "https://files.pythonhosted.org/packages/e7/46/f1220bd9944d8aa40d8ccff100eeeee19b505b857b6f603d6078cb5315b0/multidict-6.7.1-cp310-cp310-win32.whl", hash = "sha256:d82dd730a95e6643802f4454b8fdecdf08667881a9c5670db85bc5a56693f122", size = 41416, upload-time = "2026-01-26T02:43:22.703Z" }, + 
{ url = "https://files.pythonhosted.org/packages/68/00/9b38e272a770303692fc406c36e1a4c740f401522d5787691eb38a8925a8/multidict-6.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:cf37cbe5ced48d417ba045aca1b21bafca67489452debcde94778a576666a1df", size = 46022, upload-time = "2026-01-26T02:43:23.77Z" }, + { url = "https://files.pythonhosted.org/packages/64/65/d8d42490c02ee07b6bbe00f7190d70bb4738b3cce7629aaf9f213ef730dd/multidict-6.7.1-cp310-cp310-win_arm64.whl", hash = "sha256:59bc83d3f66b41dac1e7460aac1d196edc70c9ba3094965c467715a70ecb46db", size = 43238, upload-time = "2026-01-26T02:43:24.882Z" }, + { url = "https://files.pythonhosted.org/packages/ce/f1/a90635c4f88fb913fbf4ce660b83b7445b7a02615bda034b2f8eb38fd597/multidict-6.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7ff981b266af91d7b4b3793ca3382e53229088d193a85dfad6f5f4c27fc73e5d", size = 76626, upload-time = "2026-01-26T02:43:26.485Z" }, + { url = "https://files.pythonhosted.org/packages/a6/9b/267e64eaf6fc637a15b35f5de31a566634a2740f97d8d094a69d34f524a4/multidict-6.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:844c5bca0b5444adb44a623fb0a1310c2f4cd41f402126bb269cd44c9b3f3e1e", size = 44706, upload-time = "2026-01-26T02:43:27.607Z" }, + { url = "https://files.pythonhosted.org/packages/dd/a4/d45caf2b97b035c57267791ecfaafbd59c68212004b3842830954bb4b02e/multidict-6.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f2a0a924d4c2e9afcd7ec64f9de35fcd96915149b2216e1cb2c10a56df483855", size = 44356, upload-time = "2026-01-26T02:43:28.661Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d2/0a36c8473f0cbaeadd5db6c8b72d15bbceeec275807772bfcd059bef487d/multidict-6.7.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8be1802715a8e892c784c0197c2ace276ea52702a0ede98b6310c8f255a5afb3", size = 244355, upload-time = "2026-01-26T02:43:31.165Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/16/8c65be997fd7dd311b7d39c7b6e71a0cb449bad093761481eccbbe4b42a2/multidict-6.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2e2d2ed645ea29f31c4c7ea1552fcfd7cb7ba656e1eafd4134a6620c9f5fdd9e", size = 246433, upload-time = "2026-01-26T02:43:32.581Z" }, + { url = "https://files.pythonhosted.org/packages/01/fb/4dbd7e848d2799c6a026ec88ad39cf2b8416aa167fcc903baa55ecaa045c/multidict-6.7.1-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:95922cee9a778659e91db6497596435777bd25ed116701a4c034f8e46544955a", size = 225376, upload-time = "2026-01-26T02:43:34.417Z" }, + { url = "https://files.pythonhosted.org/packages/b6/8a/4a3a6341eac3830f6053062f8fbc9a9e54407c80755b3f05bc427295c2d0/multidict-6.7.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6b83cabdc375ffaaa15edd97eb7c0c672ad788e2687004990074d7d6c9b140c8", size = 257365, upload-time = "2026-01-26T02:43:35.741Z" }, + { url = "https://files.pythonhosted.org/packages/f7/a2/dd575a69c1aa206e12d27d0770cdf9b92434b48a9ef0cd0d1afdecaa93c4/multidict-6.7.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:38fb49540705369bab8484db0689d86c0a33a0a9f2c1b197f506b71b4b6c19b0", size = 254747, upload-time = "2026-01-26T02:43:36.976Z" }, + { url = "https://files.pythonhosted.org/packages/5a/56/21b27c560c13822ed93133f08aa6372c53a8e067f11fbed37b4adcdac922/multidict-6.7.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:439cbebd499f92e9aa6793016a8acaa161dfa749ae86d20960189f5398a19144", size = 246293, upload-time = "2026-01-26T02:43:38.258Z" }, + { url = "https://files.pythonhosted.org/packages/5a/a4/23466059dc3854763423d0ad6c0f3683a379d97673b1b89ec33826e46728/multidict-6.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:6d3bc717b6fe763b8be3f2bee2701d3c8eb1b2a8ae9f60910f1b2860c82b6c49", size = 242962, upload-time = "2026-01-26T02:43:40.034Z" }, + { url = "https://files.pythonhosted.org/packages/1f/67/51dd754a3524d685958001e8fa20a0f5f90a6a856e0a9dcabff69be3dbb7/multidict-6.7.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:619e5a1ac57986dbfec9f0b301d865dddf763696435e2962f6d9cf2fdff2bb71", size = 237360, upload-time = "2026-01-26T02:43:41.752Z" }, + { url = "https://files.pythonhosted.org/packages/64/3f/036dfc8c174934d4b55d86ff4f978e558b0e585cef70cfc1ad01adc6bf18/multidict-6.7.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0b38ebffd9be37c1170d33bc0f36f4f262e0a09bc1aac1c34c7aa51a7293f0b3", size = 245940, upload-time = "2026-01-26T02:43:43.042Z" }, + { url = "https://files.pythonhosted.org/packages/3d/20/6214d3c105928ebc353a1c644a6ef1408bc5794fcb4f170bb524a3c16311/multidict-6.7.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:10ae39c9cfe6adedcdb764f5e8411d4a92b055e35573a2eaa88d3323289ef93c", size = 253502, upload-time = "2026-01-26T02:43:44.371Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e2/c653bc4ae1be70a0f836b82172d643fcf1dade042ba2676ab08ec08bff0f/multidict-6.7.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:25167cc263257660290fba06b9318d2026e3c910be240a146e1f66dd114af2b0", size = 247065, upload-time = "2026-01-26T02:43:45.745Z" }, + { url = "https://files.pythonhosted.org/packages/c8/11/a854b4154cd3bd8b1fd375e8a8ca9d73be37610c361543d56f764109509b/multidict-6.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:128441d052254f42989ef98b7b6a6ecb1e6f708aa962c7984235316db59f50fa", size = 241870, upload-time = "2026-01-26T02:43:47.054Z" }, + { url = "https://files.pythonhosted.org/packages/13/bf/9676c0392309b5fdae322333d22a829715b570edb9baa8016a517b55b558/multidict-6.7.1-cp311-cp311-win32.whl", hash = "sha256:d62b7f64ffde3b99d06b707a280db04fb3855b55f5a06df387236051d0668f4a", size = 41302, upload-time = "2026-01-26T02:43:48.753Z" }, 
+ { url = "https://files.pythonhosted.org/packages/c9/68/f16a3a8ba6f7b6dc92a1f19669c0810bd2c43fc5a02da13b1cbf8e253845/multidict-6.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:bdbf9f3b332abd0cdb306e7c2113818ab1e922dc84b8f8fd06ec89ed2a19ab8b", size = 45981, upload-time = "2026-01-26T02:43:49.921Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ad/9dd5305253fa00cd3c7555dbef69d5bf4133debc53b87ab8d6a44d411665/multidict-6.7.1-cp311-cp311-win_arm64.whl", hash = "sha256:b8c990b037d2fff2f4e33d3f21b9b531c5745b33a49a7d6dbe7a177266af44f6", size = 43159, upload-time = "2026-01-26T02:43:51.635Z" }, + { url = "https://files.pythonhosted.org/packages/8d/9c/f20e0e2cf80e4b2e4b1c365bf5fe104ee633c751a724246262db8f1a0b13/multidict-6.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a90f75c956e32891a4eda3639ce6dd86e87105271f43d43442a3aedf3cddf172", size = 76893, upload-time = "2026-01-26T02:43:52.754Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cf/18ef143a81610136d3da8193da9d80bfe1cb548a1e2d1c775f26b23d024a/multidict-6.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fccb473e87eaa1382689053e4a4618e7ba7b9b9b8d6adf2027ee474597128cd", size = 45456, upload-time = "2026-01-26T02:43:53.893Z" }, + { url = "https://files.pythonhosted.org/packages/a9/65/1caac9d4cd32e8433908683446eebc953e82d22b03d10d41a5f0fefe991b/multidict-6.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0fa96985700739c4c7853a43c0b3e169360d6855780021bfc6d0f1ce7c123e7", size = 43872, upload-time = "2026-01-26T02:43:55.041Z" }, + { url = "https://files.pythonhosted.org/packages/cf/3b/d6bd75dc4f3ff7c73766e04e705b00ed6dbbaccf670d9e05a12b006f5a21/multidict-6.7.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cb2a55f408c3043e42b40cc8eecd575afa27b7e0b956dfb190de0f8499a57a53", size = 251018, upload-time = "2026-01-26T02:43:56.198Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/80/c959c5933adedb9ac15152e4067c702a808ea183a8b64cf8f31af8ad3155/multidict-6.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb0ce7b2a32d09892b3dd6cc44877a0d02a33241fafca5f25c8b6b62374f8b75", size = 258883, upload-time = "2026-01-26T02:43:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/86/85/7ed40adafea3d4f1c8b916e3b5cc3a8e07dfcdcb9cd72800f4ed3ca1b387/multidict-6.7.1-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c3a32d23520ee37bf327d1e1a656fec76a2edd5c038bf43eddfa0572ec49c60b", size = 242413, upload-time = "2026-01-26T02:43:58.755Z" }, + { url = "https://files.pythonhosted.org/packages/d2/57/b8565ff533e48595503c785f8361ff9a4fde4d67de25c207cd0ba3befd03/multidict-6.7.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9c90fed18bffc0189ba814749fdcc102b536e83a9f738a9003e569acd540a733", size = 268404, upload-time = "2026-01-26T02:44:00.216Z" }, + { url = "https://files.pythonhosted.org/packages/e0/50/9810c5c29350f7258180dfdcb2e52783a0632862eb334c4896ac717cebcb/multidict-6.7.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:da62917e6076f512daccfbbde27f46fed1c98fee202f0559adec8ee0de67f71a", size = 269456, upload-time = "2026-01-26T02:44:02.202Z" }, + { url = "https://files.pythonhosted.org/packages/f3/8d/5e5be3ced1d12966fefb5c4ea3b2a5b480afcea36406559442c6e31d4a48/multidict-6.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfde23ef6ed9db7eaee6c37dcec08524cb43903c60b285b172b6c094711b3961", size = 256322, upload-time = "2026-01-26T02:44:03.56Z" }, + { url = "https://files.pythonhosted.org/packages/31/6e/d8a26d81ac166a5592782d208dd90dfdc0a7a218adaa52b45a672b46c122/multidict-6.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:3758692429e4e32f1ba0df23219cd0b4fc0a52f476726fff9337d1a57676a582", size = 253955, upload-time = "2026-01-26T02:44:04.845Z" }, + { url = "https://files.pythonhosted.org/packages/59/4c/7c672c8aad41534ba619bcd4ade7a0dc87ed6b8b5c06149b85d3dd03f0cd/multidict-6.7.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:398c1478926eca669f2fd6a5856b6de9c0acf23a2cb59a14c0ba5844fa38077e", size = 251254, upload-time = "2026-01-26T02:44:06.133Z" }, + { url = "https://files.pythonhosted.org/packages/7b/bd/84c24de512cbafbdbc39439f74e967f19570ce7924e3007174a29c348916/multidict-6.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c102791b1c4f3ab36ce4101154549105a53dc828f016356b3e3bcae2e3a039d3", size = 252059, upload-time = "2026-01-26T02:44:07.518Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ba/f5449385510825b73d01c2d4087bf6d2fccc20a2d42ac34df93191d3dd03/multidict-6.7.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a088b62bd733e2ad12c50dad01b7d0166c30287c166e137433d3b410add807a6", size = 263588, upload-time = "2026-01-26T02:44:09.382Z" }, + { url = "https://files.pythonhosted.org/packages/d7/11/afc7c677f68f75c84a69fe37184f0f82fce13ce4b92f49f3db280b7e92b3/multidict-6.7.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d51ff4785d58d3f6c91bdbffcb5e1f7ddfda557727043aa20d20ec4f65e324a", size = 259642, upload-time = "2026-01-26T02:44:10.73Z" }, + { url = "https://files.pythonhosted.org/packages/2b/17/ebb9644da78c4ab36403739e0e6e0e30ebb135b9caf3440825001a0bddcb/multidict-6.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc5907494fccf3e7d3f94f95c91d6336b092b5fc83811720fae5e2765890dfba", size = 251377, upload-time = "2026-01-26T02:44:12.042Z" }, + { url = "https://files.pythonhosted.org/packages/ca/a4/840f5b97339e27846c46307f2530a2805d9d537d8b8bd416af031cad7fa0/multidict-6.7.1-cp312-cp312-win32.whl", hash = "sha256:28ca5ce2fd9716631133d0e9a9b9a745ad7f60bac2bccafb56aa380fc0b6c511", size = 41887, upload-time = "2026-01-26T02:44:14.245Z" }, 
+ { url = "https://files.pythonhosted.org/packages/80/31/0b2517913687895f5904325c2069d6a3b78f66cc641a86a2baf75a05dcbb/multidict-6.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcee94dfbd638784645b066074b338bc9cc155d4b4bffa4adce1615c5a426c19", size = 46053, upload-time = "2026-01-26T02:44:15.371Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5b/aba28e4ee4006ae4c7df8d327d31025d760ffa992ea23812a601d226e682/multidict-6.7.1-cp312-cp312-win_arm64.whl", hash = "sha256:ba0a9fb644d0c1a2194cf7ffb043bd852cea63a57f66fbd33959f7dae18517bf", size = 43307, upload-time = "2026-01-26T02:44:16.852Z" }, + { url = "https://files.pythonhosted.org/packages/81/08/7036c080d7117f28a4af526d794aab6a84463126db031b007717c1a6676e/multidict-6.7.1-py3-none-any.whl", hash = "sha256:55d97cc6dae627efa6a6e548885712d4864b81110ac76fa4e534c03819fa4a56", size = 12319, upload-time = "2026-01-26T02:46:44.004Z" }, +] + +[[package]] +name = "multiprocess" +version = "0.70.19" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dill" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a2/f2/e783ac7f2aeeed14e9e12801f22529cc7e6b7ab80928d6dcce4e9f00922d/multiprocess-0.70.19.tar.gz", hash = "sha256:952021e0e6c55a4a9fe4cd787895b86e239a40e76802a789d6305398d3975897", size = 2079989, upload-time = "2026-01-19T06:47:39.744Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/b6/10832f96b499690854e574360be342a282f5f7dba58eff791299ff6c0637/multiprocess-0.70.19-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:02e5c35d7d6cd2bdc89c1858867f7bde4012837411023a4696c148c1bdd7c80e", size = 135131, upload-time = "2026-01-19T06:47:20.479Z" }, + { url = "https://files.pythonhosted.org/packages/99/50/faef2d8106534b0dc4a0b772668a1a99682696ebf17d3c0f13f2ed6a656a/multiprocess-0.70.19-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:79576c02d1207ec405b00cabf2c643c36070800cca433860e14539df7818b2aa", size = 135131, upload-time = 
"2026-01-19T06:47:21.879Z" }, + { url = "https://files.pythonhosted.org/packages/94/b1/0b71d18b76bf423c2e8ee00b31db37d17297ab3b4db44e188692afdca628/multiprocess-0.70.19-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c6b6d78d43a03b68014ca1f0b7937d965393a670c5de7c29026beb2258f2f896", size = 135134, upload-time = "2026-01-19T06:47:23.262Z" }, + { url = "https://files.pythonhosted.org/packages/7e/aa/714635c727dbfc251139226fa4eaf1b07f00dc12d9cd2eb25f931adaf873/multiprocess-0.70.19-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1bbf1b69af1cf64cd05f65337d9215b88079ec819cd0ea7bac4dab84e162efe7", size = 144743, upload-time = "2026-01-19T06:47:24.562Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e1/155f6abf5e6b5d9cef29b6d0167c180846157a4aca9b9bee1a217f67c959/multiprocess-0.70.19-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:5be9ec7f0c1c49a4f4a6fd20d5dda4aeabc2d39a50f4ad53720f1cd02b3a7c2e", size = 144738, upload-time = "2026-01-19T06:47:26.636Z" }, + { url = "https://files.pythonhosted.org/packages/af/cb/f421c2869d75750a4f32301cc20c4b63fab6376e9a75c8e5e655bdeb3d9b/multiprocess-0.70.19-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1c3dce098845a0db43b32a0b76a228ca059a668071cfeaa0f40c36c0b1585d45", size = 144741, upload-time = "2026-01-19T06:47:27.985Z" }, + { url = "https://files.pythonhosted.org/packages/e3/45/8004d1e6b9185c1a444d6b55ac5682acf9d98035e54386d967366035a03a/multiprocess-0.70.19-py310-none-any.whl", hash = "sha256:97404393419dcb2a8385910864eedf47a3cadf82c66345b44f036420eb0b5d87", size = 134948, upload-time = "2026-01-19T06:47:32.325Z" }, + { url = "https://files.pythonhosted.org/packages/86/c2/dec9722dc3474c164a0b6bcd9a7ed7da542c98af8cabce05374abab35edd/multiprocess-0.70.19-py311-none-any.whl", hash = "sha256:928851ae7973aea4ce0eaf330bbdafb2e01398a91518d5c8818802845564f45c", size = 144457, upload-time = "2026-01-19T06:47:33.711Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/70/38998b950a97ea279e6bd657575d22d1a2047256caf707d9a10fbce4f065/multiprocess-0.70.19-py312-none-any.whl", hash = "sha256:3a56c0e85dd5025161bac5ce138dcac1e49174c7d8e74596537e729fd5c53c28", size = 150281, upload-time = "2026-01-19T06:47:35.037Z" }, + { url = "https://files.pythonhosted.org/packages/7e/82/69e539c4c2027f1e1697e09aaa2449243085a0edf81ae2c6341e84d769b6/multiprocess-0.70.19-py39-none-any.whl", hash = "sha256:0d4b4397ed669d371c81dcd1ef33fd384a44d6c3de1bd0ca7ac06d837720d3c5", size = 133477, upload-time = "2026-01-19T06:47:38.619Z" }, +] + [[package]] name = "nbclient" version = "0.10.4" @@ -1893,7 +5929,7 @@ wheels = [ [[package]] name = "nbconvert" -version = "7.16.6" +version = "7.17.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "beautifulsoup4" }, @@ -1911,9 +5947,9 @@ dependencies = [ { name = "pygments" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/59/f28e15fc47ffb73af68a8d9b47367a8630d76e97ae85ad18271b9db96fdf/nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582", size = 857715, upload-time = "2025-01-28T09:29:14.724Z" } +sdist = { url = "https://files.pythonhosted.org/packages/01/b1/708e53fe2e429c103c6e6e159106bcf0357ac41aa4c28772bd8402339051/nbconvert-7.17.1.tar.gz", hash = "sha256:34d0d0a7e73ce3cbab6c5aae8f4f468797280b01fd8bd2ca746da8569eddd7d2", size = 865311, upload-time = "2026-04-08T00:44:14.914Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/9a/cd673b2f773a12c992f41309ef81b99da1690426bd2f96957a7ade0d3ed7/nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b", size = 258525, upload-time = "2025-01-28T09:29:12.551Z" }, + { url = "https://files.pythonhosted.org/packages/67/f8/bb0a9d5f46819c821dc1f004aa2cc29b1d91453297dbf5ff20470f00f193/nbconvert-7.17.1-py3-none-any.whl", hash = 
"sha256:aa85c087b435e7bf1ffd03319f658e285f2b89eccab33bc1ba7025495ab3e7c8", size = 261927, upload-time = "2026-04-08T00:44:12.845Z" }, ] [[package]] @@ -1945,9 +5981,11 @@ name = "networkx" version = "3.4.2" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version < '3.11' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", ] sdist = { url = "https://files.pythonhosted.org/packages/fd/1d/06475e1cd5264c0b870ea2cc6fdb3e37177c1e565c43f56ff17a10e3937f/networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1", size = 2151368, upload-time = "2024-10-21T12:39:38.695Z" } wheels = [ @@ -1956,19 +5994,629 @@ wheels = [ [[package]] name = "networkx" -version = "3.5" +version = "3.6.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", - "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - 
"python_full_version == '3.11.*' and sys_platform == 'darwin'", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", -] -sdist = { url = "https://files.pythonhosted.org/packages/6c/4f/ccdb8ad3a38e583f214547fd2f7ff1fc160c43a75af88e6aec213404b96a/networkx-3.5.tar.gz", hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037", size = 2471065, upload-time = "2025-05-29T11:35:07.804Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/eb/8d/776adee7bbf76365fdd7f2552710282c79a4ead5d2a46408c9043a2b70ba/networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec", size = 2034406, upload-time = "2025-05-29T11:35:04.961Z" }, + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 
's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
>= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and 
extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", 
+ "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine 
!= 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 
'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' 
and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 
'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
>= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' 
and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 
's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' 
and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and 
extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == 
'3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine 
!= 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 
'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
== 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
>= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra 
== 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' 
and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 
'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine 
!= 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and 
extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and 
sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and 
extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine 
!= 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra 
!= 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra 
== 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' 
and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 
'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", +] +sdist = { url = "https://files.pythonhosted.org/packages/6a/51/63fe664f3908c97be9d2e4f1158eb633317598cfa6e1fc14af5383f17512/networkx-3.6.1.tar.gz", hash = "sha256:26b7c357accc0c8cde558ad486283728b65b6a95d85ee1cd66bafab4c8168509", size = 2517025, upload-time = "2025-12-08T17:02:39.908Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/9e/c9/b2622292ea83fbb4ec318f5b9ab867d0a28ab43c5717bb85b0a5f6b3b0a4/networkx-3.6.1-py3-none-any.whl", hash = "sha256:d47fbf302e7d9cbbb9e2555a0d267983d2aa476bac30e90dfbe5669bd57f3762", size = 2068504, upload-time = "2025-12-08T17:02:38.159Z" }, ] [[package]] @@ -2013,6 +6661,23 @@ wheels = [ name = "numpy" version = "1.26.4" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and 
sys_platform == 'linux'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", +] sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129, upload-time = "2024-02-06T00:26:44.495Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/a7/94/ace0fdea5241a27d13543ee117cbc65868e82213fb31a8eb7fe9ff23f313/numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0", size = 20631468, upload-time = "2024-02-05T23:48:01.194Z" }, @@ -2041,6 +6706,590 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/16/2e/86f24451c2d530c88daf997cb8d6ac622c1d40d19f5a031ed68a4b73a374/numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818", size = 15517754, upload-time = "2024-02-05T23:58:36.364Z" }, ] +[[package]] +name = "numpy" +version = "2.2.6" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + 
"python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", +] +sdist = { url = "https://files.pythonhosted.org/packages/76/21/7d2a95e4bba9dc13d043ee156a356c0a8f0c6309dff6b21b4d71a073b8a8/numpy-2.2.6.tar.gz", hash = "sha256:e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd", size = 20276440, upload-time = "2025-05-17T22:38:04.611Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/3e/ed6db5be21ce87955c0cbd3009f2803f59fa08df21b5df06862e2d8e2bdd/numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b412caa66f72040e6d268491a59f2c43bf03eb6c96dd8f0307829feb7fa2b6fb", size = 21165245, upload-time = "2025-05-17T21:27:58.555Z" }, + { url = "https://files.pythonhosted.org/packages/22/c2/4b9221495b2a132cc9d2eb862e21d42a009f5a60e45fc44b00118c174bff/numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e41fd67c52b86603a91c1a505ebaef50b3314de0213461c7a6e99c9a3beff90", size = 14360048, upload-time = "2025-05-17T21:28:21.406Z" }, + { url = "https://files.pythonhosted.org/packages/fd/77/dc2fcfc66943c6410e2bf598062f5959372735ffda175b39906d54f02349/numpy-2.2.6-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:37e990a01ae6ec7fe7fa1c26c55ecb672dd98b19c3d0e1d1f326fa13cb38d163", size = 5340542, upload-time = "2025-05-17T21:28:30.931Z" }, + { url = "https://files.pythonhosted.org/packages/7a/4f/1cb5fdc353a5f5cc7feb692db9b8ec2c3d6405453f982435efc52561df58/numpy-2.2.6-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:5a6429d4be8ca66d889b7cf70f536a397dc45ba6faeb5f8c5427935d9592e9cf", size = 6878301, upload-time = "2025-05-17T21:28:41.613Z" }, + { url = "https://files.pythonhosted.org/packages/eb/17/96a3acd228cec142fcb8723bd3cc39c2a474f7dcf0a5d16731980bcafa95/numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efd28d4e9cd7d7a8d39074a4d44c63eda73401580c5c76acda2ce969e0a38e83", size = 14297320, upload-time = "2025-05-17T21:29:02.78Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/63/3de6a34ad7ad6646ac7d2f55ebc6ad439dbbf9c4370017c50cf403fb19b5/numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915", size = 16801050, upload-time = "2025-05-17T21:29:27.675Z" }, + { url = "https://files.pythonhosted.org/packages/07/b6/89d837eddef52b3d0cec5c6ba0456c1bf1b9ef6a6672fc2b7873c3ec4e2e/numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74d4531beb257d2c3f4b261bfb0fc09e0f9ebb8842d82a7b4209415896adc680", size = 15807034, upload-time = "2025-05-17T21:29:51.102Z" }, + { url = "https://files.pythonhosted.org/packages/01/c8/dc6ae86e3c61cfec1f178e5c9f7858584049b6093f843bca541f94120920/numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8fc377d995680230e83241d8a96def29f204b5782f371c532579b4f20607a289", size = 18614185, upload-time = "2025-05-17T21:30:18.703Z" }, + { url = "https://files.pythonhosted.org/packages/5b/c5/0064b1b7e7c89137b471ccec1fd2282fceaae0ab3a9550f2568782d80357/numpy-2.2.6-cp310-cp310-win32.whl", hash = "sha256:b093dd74e50a8cba3e873868d9e93a85b78e0daf2e98c6797566ad8044e8363d", size = 6527149, upload-time = "2025-05-17T21:30:29.788Z" }, + { url = "https://files.pythonhosted.org/packages/a3/dd/4b822569d6b96c39d1215dbae0582fd99954dcbcf0c1a13c61783feaca3f/numpy-2.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:f0fd6321b839904e15c46e0d257fdd101dd7f530fe03fd6359c1ea63738703f3", size = 12904620, upload-time = "2025-05-17T21:30:48.994Z" }, + { url = "https://files.pythonhosted.org/packages/da/a8/4f83e2aa666a9fbf56d6118faaaf5f1974d456b1823fda0a176eff722839/numpy-2.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f9f1adb22318e121c5c69a09142811a201ef17ab257a1e66ca3025065b7f53ae", size = 21176963, upload-time = "2025-05-17T21:31:19.36Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/2b/64e1affc7972decb74c9e29e5649fac940514910960ba25cd9af4488b66c/numpy-2.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c820a93b0255bc360f53eca31a0e676fd1101f673dda8da93454a12e23fc5f7a", size = 14406743, upload-time = "2025-05-17T21:31:41.087Z" }, + { url = "https://files.pythonhosted.org/packages/4a/9f/0121e375000b5e50ffdd8b25bf78d8e1a5aa4cca3f185d41265198c7b834/numpy-2.2.6-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3d70692235e759f260c3d837193090014aebdf026dfd167834bcba43e30c2a42", size = 5352616, upload-time = "2025-05-17T21:31:50.072Z" }, + { url = "https://files.pythonhosted.org/packages/31/0d/b48c405c91693635fbe2dcd7bc84a33a602add5f63286e024d3b6741411c/numpy-2.2.6-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:481b49095335f8eed42e39e8041327c05b0f6f4780488f61286ed3c01368d491", size = 6889579, upload-time = "2025-05-17T21:32:01.712Z" }, + { url = "https://files.pythonhosted.org/packages/52/b8/7f0554d49b565d0171eab6e99001846882000883998e7b7d9f0d98b1f934/numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a", size = 14312005, upload-time = "2025-05-17T21:32:23.332Z" }, + { url = "https://files.pythonhosted.org/packages/b3/dd/2238b898e51bd6d389b7389ffb20d7f4c10066d80351187ec8e303a5a475/numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf", size = 16821570, upload-time = "2025-05-17T21:32:47.991Z" }, + { url = "https://files.pythonhosted.org/packages/83/6c/44d0325722cf644f191042bf47eedad61c1e6df2432ed65cbe28509d404e/numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1", size = 15818548, upload-time = "2025-05-17T21:33:11.728Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/9d/81e8216030ce66be25279098789b665d49ff19eef08bfa8cb96d4957f422/numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab", size = 18620521, upload-time = "2025-05-17T21:33:39.139Z" }, + { url = "https://files.pythonhosted.org/packages/6a/fd/e19617b9530b031db51b0926eed5345ce8ddc669bb3bc0044b23e275ebe8/numpy-2.2.6-cp311-cp311-win32.whl", hash = "sha256:0678000bb9ac1475cd454c6b8c799206af8107e310843532b04d49649c717a47", size = 6525866, upload-time = "2025-05-17T21:33:50.273Z" }, + { url = "https://files.pythonhosted.org/packages/31/0a/f354fb7176b81747d870f7991dc763e157a934c717b67b58456bc63da3df/numpy-2.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:e8213002e427c69c45a52bbd94163084025f533a55a59d6f9c5b820774ef3303", size = 12907455, upload-time = "2025-05-17T21:34:09.135Z" }, + { url = "https://files.pythonhosted.org/packages/82/5d/c00588b6cf18e1da539b45d3598d3557084990dcc4331960c15ee776ee41/numpy-2.2.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41c5a21f4a04fa86436124d388f6ed60a9343a6f767fced1a8a71c3fbca038ff", size = 20875348, upload-time = "2025-05-17T21:34:39.648Z" }, + { url = "https://files.pythonhosted.org/packages/66/ee/560deadcdde6c2f90200450d5938f63a34b37e27ebff162810f716f6a230/numpy-2.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de749064336d37e340f640b05f24e9e3dd678c57318c7289d222a8a2f543e90c", size = 14119362, upload-time = "2025-05-17T21:35:01.241Z" }, + { url = "https://files.pythonhosted.org/packages/3c/65/4baa99f1c53b30adf0acd9a5519078871ddde8d2339dc5a7fde80d9d87da/numpy-2.2.6-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:894b3a42502226a1cac872f840030665f33326fc3dac8e57c607905773cdcde3", size = 5084103, upload-time = "2025-05-17T21:35:10.622Z" }, + { url = "https://files.pythonhosted.org/packages/cc/89/e5a34c071a0570cc40c9a54eb472d113eea6d002e9ae12bb3a8407fb912e/numpy-2.2.6-cp312-cp312-macosx_14_0_x86_64.whl", hash 
= "sha256:71594f7c51a18e728451bb50cc60a3ce4e6538822731b2933209a1f3614e9282", size = 6625382, upload-time = "2025-05-17T21:35:21.414Z" }, + { url = "https://files.pythonhosted.org/packages/f8/35/8c80729f1ff76b3921d5c9487c7ac3de9b2a103b1cd05e905b3090513510/numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87", size = 14018462, upload-time = "2025-05-17T21:35:42.174Z" }, + { url = "https://files.pythonhosted.org/packages/8c/3d/1e1db36cfd41f895d266b103df00ca5b3cbe965184df824dec5c08c6b803/numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249", size = 16527618, upload-time = "2025-05-17T21:36:06.711Z" }, + { url = "https://files.pythonhosted.org/packages/61/c6/03ed30992602c85aa3cd95b9070a514f8b3c33e31124694438d88809ae36/numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49", size = 15505511, upload-time = "2025-05-17T21:36:29.965Z" }, + { url = "https://files.pythonhosted.org/packages/b7/25/5761d832a81df431e260719ec45de696414266613c9ee268394dd5ad8236/numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de", size = 18313783, upload-time = "2025-05-17T21:36:56.883Z" }, + { url = "https://files.pythonhosted.org/packages/57/0a/72d5a3527c5ebffcd47bde9162c39fae1f90138c961e5296491ce778e682/numpy-2.2.6-cp312-cp312-win32.whl", hash = "sha256:4eeaae00d789f66c7a25ac5f34b71a7035bb474e679f410e5e1a94deb24cf2d4", size = 6246506, upload-time = "2025-05-17T21:37:07.368Z" }, + { url = "https://files.pythonhosted.org/packages/36/fa/8c9210162ca1b88529ab76b41ba02d433fd54fecaf6feb70ef9f124683f1/numpy-2.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:c1f9540be57940698ed329904db803cf7a402f3fc200bfe599334c9bd84a40b2", size = 12614190, upload-time = 
"2025-05-17T21:37:26.213Z" }, + { url = "https://files.pythonhosted.org/packages/9e/3b/d94a75f4dbf1ef5d321523ecac21ef23a3cd2ac8b78ae2aac40873590229/numpy-2.2.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0b605b275d7bd0c640cad4e5d30fa701a8d59302e127e5f79138ad62762c3e3d", size = 21040391, upload-time = "2025-05-17T21:44:35.948Z" }, + { url = "https://files.pythonhosted.org/packages/17/f4/09b2fa1b58f0fb4f7c7963a1649c64c4d315752240377ed74d9cd878f7b5/numpy-2.2.6-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:7befc596a7dc9da8a337f79802ee8adb30a552a94f792b9c9d18c840055907db", size = 6786754, upload-time = "2025-05-17T21:44:47.446Z" }, + { url = "https://files.pythonhosted.org/packages/af/30/feba75f143bdc868a1cc3f44ccfa6c4b9ec522b36458e738cd00f67b573f/numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543", size = 16643476, upload-time = "2025-05-17T21:45:11.871Z" }, + { url = "https://files.pythonhosted.org/packages/37/48/ac2a9584402fb6c0cd5b5d1a91dcf176b15760130dd386bbafdbfe3640bf/numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d042d24c90c41b54fd506da306759e06e568864df8ec17ccc17e9e884634fd00", size = 12812666, upload-time = "2025-05-17T21:45:31.426Z" }, +] + +[[package]] +name = "numpy" +version = "2.4.4" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and 
sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine 
!= 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and 
sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 
's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
>= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' 
and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 
's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra 
!= 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine 
!= 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and 
extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", 
+ "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 
'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
>= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine 
!= 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and 
extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine 
!= 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and 
extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", 
+ "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", +] +sdist = { url = "https://files.pythonhosted.org/packages/d7/9f/b8cef5bffa569759033adda9481211426f12f53299629b410340795c2514/numpy-2.4.4.tar.gz", hash = 
"sha256:2d390634c5182175533585cc89f3608a4682ccb173cc9bb940b2881c8d6f8fa0", size = 20731587, upload-time = "2026-03-29T13:22:01.298Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/c6/4218570d8c8ecc9704b5157a3348e486e84ef4be0ed3e38218ab473c83d2/numpy-2.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f983334aea213c99992053ede6168500e5f086ce74fbc4acc3f2b00f5762e9db", size = 16976799, upload-time = "2026-03-29T13:18:15.438Z" }, + { url = "https://files.pythonhosted.org/packages/dd/92/b4d922c4a5f5dab9ed44e6153908a5c665b71acf183a83b93b690996e39b/numpy-2.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72944b19f2324114e9dc86a159787333b77874143efcf89a5167ef83cfee8af0", size = 14971552, upload-time = "2026-03-29T13:18:18.606Z" }, + { url = "https://files.pythonhosted.org/packages/8a/dc/df98c095978fa6ee7b9a9387d1d58cbb3d232d0e69ad169a4ce784bde4fd/numpy-2.4.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:86b6f55f5a352b48d7fbfd2dbc3d5b780b2d79f4d3c121f33eb6efb22e9a2015", size = 5476566, upload-time = "2026-03-29T13:18:21.532Z" }, + { url = "https://files.pythonhosted.org/packages/28/34/b3fdcec6e725409223dd27356bdf5a3c2cc2282e428218ecc9cb7acc9763/numpy-2.4.4-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:ba1f4fc670ed79f876f70082eff4f9583c15fb9a4b89d6188412de4d18ae2f40", size = 6806482, upload-time = "2026-03-29T13:18:23.634Z" }, + { url = "https://files.pythonhosted.org/packages/68/62/63417c13aa35d57bee1337c67446761dc25ea6543130cf868eace6e8157b/numpy-2.4.4-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a87ec22c87be071b6bdbd27920b129b94f2fc964358ce38f3822635a3e2e03d", size = 15973376, upload-time = "2026-03-29T13:18:26.677Z" }, + { url = "https://files.pythonhosted.org/packages/cf/c5/9fcb7e0e69cef59cf10c746b84f7d58b08bc66a6b7d459783c5a4f6101a6/numpy-2.4.4-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:df3775294accfdd75f32c74ae39fcba920c9a378a2fc18a12b6820aa8c1fb502", size 
= 16925137, upload-time = "2026-03-29T13:18:30.14Z" }, + { url = "https://files.pythonhosted.org/packages/7e/43/80020edacb3f84b9efdd1591120a4296462c23fd8db0dde1666f6ef66f13/numpy-2.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d4e437e295f18ec29bc79daf55e8a47a9113df44d66f702f02a293d93a2d6dd", size = 17329414, upload-time = "2026-03-29T13:18:33.733Z" }, + { url = "https://files.pythonhosted.org/packages/fd/06/af0658593b18a5f73532d377188b964f239eb0894e664a6c12f484472f97/numpy-2.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6aa3236c78803afbcb255045fbef97a9e25a1f6c9888357d205ddc42f4d6eba5", size = 18658397, upload-time = "2026-03-29T13:18:37.511Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ce/13a09ed65f5d0ce5c7dd0669250374c6e379910f97af2c08c57b0608eee4/numpy-2.4.4-cp311-cp311-win32.whl", hash = "sha256:30caa73029a225b2d40d9fae193e008e24b2026b7ee1a867b7ee8d96ca1a448e", size = 6239499, upload-time = "2026-03-29T13:18:40.372Z" }, + { url = "https://files.pythonhosted.org/packages/bd/63/05d193dbb4b5eec1eca73822d80da98b511f8328ad4ae3ca4caf0f4db91d/numpy-2.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:6bbe4eb67390b0a0265a2c25458f6b90a409d5d069f1041e6aff1e27e3d9a79e", size = 12614257, upload-time = "2026-03-29T13:18:42.95Z" }, + { url = "https://files.pythonhosted.org/packages/87/c5/8168052f080c26fa984c413305012be54741c9d0d74abd7fbeeccae3889f/numpy-2.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:fcfe2045fd2e8f3cb0ce9d4ba6dba6333b8fa05bb8a4939c908cd43322d14c7e", size = 10486775, upload-time = "2026-03-29T13:18:45.835Z" }, + { url = "https://files.pythonhosted.org/packages/28/05/32396bec30fb2263770ee910142f49c1476d08e8ad41abf8403806b520ce/numpy-2.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:15716cfef24d3a9762e3acdf87e27f58dc823d1348f765bbea6bef8c639bfa1b", size = 16689272, upload-time = "2026-03-29T13:18:49.223Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/f3/a983d28637bfcd763a9c7aafdb6d5c0ebf3d487d1e1459ffdb57e2f01117/numpy-2.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23cbfd4c17357c81021f21540da84ee282b9c8fba38a03b7b9d09ba6b951421e", size = 14699573, upload-time = "2026-03-29T13:18:52.629Z" }, + { url = "https://files.pythonhosted.org/packages/9b/fd/e5ecca1e78c05106d98028114f5c00d3eddb41207686b2b7de3e477b0e22/numpy-2.4.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8b3b60bb7cba2c8c81837661c488637eee696f59a877788a396d33150c35d842", size = 5204782, upload-time = "2026-03-29T13:18:55.579Z" }, + { url = "https://files.pythonhosted.org/packages/de/2f/702a4594413c1a8632092beae8aba00f1d67947389369b3777aed783fdca/numpy-2.4.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e4a010c27ff6f210ff4c6ef34394cd61470d01014439b192ec22552ee867f2a8", size = 6552038, upload-time = "2026-03-29T13:18:57.769Z" }, + { url = "https://files.pythonhosted.org/packages/7f/37/eed308a8f56cba4d1fdf467a4fc67ef4ff4bf1c888f5fc980481890104b1/numpy-2.4.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9e75681b59ddaa5e659898085ae0eaea229d054f2ac0c7e563a62205a700121", size = 15670666, upload-time = "2026-03-29T13:19:00.341Z" }, + { url = "https://files.pythonhosted.org/packages/0a/0d/0e3ecece05b7a7e87ab9fb587855548da437a061326fff64a223b6dcb78a/numpy-2.4.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:81f4a14bee47aec54f883e0cad2d73986640c1590eb9bfaaba7ad17394481e6e", size = 16645480, upload-time = "2026-03-29T13:19:03.63Z" }, + { url = "https://files.pythonhosted.org/packages/34/49/f2312c154b82a286758ee2f1743336d50651f8b5195db18cdb63675ff649/numpy-2.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:62d6b0f03b694173f9fcb1fb317f7222fd0b0b103e784c6549f5e53a27718c44", size = 17020036, upload-time = "2026-03-29T13:19:07.428Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/e9/736d17bd77f1b0ec4f9901aaec129c00d59f5d84d5e79bba540ef12c2330/numpy-2.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fbc356aae7adf9e6336d336b9c8111d390a05df88f1805573ebb0807bd06fd1d", size = 18368643, upload-time = "2026-03-29T13:19:10.775Z" }, + { url = "https://files.pythonhosted.org/packages/63/f6/d417977c5f519b17c8a5c3bc9e8304b0908b0e21136fe43bf628a1343914/numpy-2.4.4-cp312-cp312-win32.whl", hash = "sha256:0d35aea54ad1d420c812bfa0385c71cd7cc5bcf7c65fed95fc2cd02fe8c79827", size = 5961117, upload-time = "2026-03-29T13:19:13.464Z" }, + { url = "https://files.pythonhosted.org/packages/2d/5b/e1deebf88ff431b01b7406ca3583ab2bbb90972bbe1c568732e49c844f7e/numpy-2.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:b5f0362dc928a6ecd9db58868fca5e48485205e3855957bdedea308f8672ea4a", size = 12320584, upload-time = "2026-03-29T13:19:16.155Z" }, + { url = "https://files.pythonhosted.org/packages/58/89/e4e856ac82a68c3ed64486a544977d0e7bdd18b8da75b78a577ca31c4395/numpy-2.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:846300f379b5b12cc769334464656bc882e0735d27d9726568bc932fdc49d5ec", size = 10221450, upload-time = "2026-03-29T13:19:18.994Z" }, + { url = "https://files.pythonhosted.org/packages/6b/33/8fae8f964a4f63ed528264ddf25d2b683d0b663e3cba26961eb838a7c1bd/numpy-2.4.4-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:58c8b5929fcb8287cbd6f0a3fae19c6e03a5c48402ae792962ac465224a629a4", size = 16854491, upload-time = "2026-03-29T13:21:38.03Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d0/1aabee441380b981cf8cdda3ae7a46aa827d1b5a8cce84d14598bc94d6d9/numpy-2.4.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:eea7ac5d2dce4189771cedb559c738a71512768210dc4e4753b107a2048b3d0e", size = 14895830, upload-time = "2026-03-29T13:21:41.509Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/b8/aafb0d1065416894fccf4df6b49ef22b8db045187949545bced89c034b8e/numpy-2.4.4-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:51fc224f7ca4d92656d5a5eb315f12eb5fe2c97a66249aa7b5f562528a3be38c", size = 5400927, upload-time = "2026-03-29T13:21:44.747Z" }, + { url = "https://files.pythonhosted.org/packages/d6/77/063baa20b08b431038c7f9ff5435540c7b7265c78cf56012a483019ca72d/numpy-2.4.4-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:28a650663f7314afc3e6ec620f44f333c386aad9f6fc472030865dc0ebb26ee3", size = 6715557, upload-time = "2026-03-29T13:21:47.406Z" }, + { url = "https://files.pythonhosted.org/packages/c7/a8/379542d45a14f149444c5c4c4e7714707239ce9cc1de8c2803958889da14/numpy-2.4.4-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:19710a9ca9992d7174e9c52f643d4272dcd1558c5f7af7f6f8190f633bd651a7", size = 15804253, upload-time = "2026-03-29T13:21:50.753Z" }, + { url = "https://files.pythonhosted.org/packages/a2/c8/f0a45426d6d21e7ea3310a15cf90c43a14d9232c31a837702dba437f3373/numpy-2.4.4-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9b2aec6af35c113b05695ebb5749a787acd63cafc83086a05771d1e1cd1e555f", size = 16753552, upload-time = "2026-03-29T13:21:54.344Z" }, + { url = "https://files.pythonhosted.org/packages/04/74/f4c001f4714c3ad9ce037e18cf2b9c64871a84951eaa0baf683a9ca9301c/numpy-2.4.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f2cf083b324a467e1ab358c105f6cad5ea950f50524668a80c486ff1db24e119", size = 12509075, upload-time = "2026-03-29T13:21:57.644Z" }, +] + [[package]] name = "nvidia-cublas-cu11" version = "11.11.3.6" @@ -2057,9 +7306,9 @@ name = "nvidia-cublas-cu12" version = "12.4.5.8" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and 
sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", ] wheels = [ { url = "https://files.pythonhosted.org/packages/7f/7f/7fbae15a3982dc9595e49ce0f19332423b260045d0a6afe93cdbe2f1f624/nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0f8aa1706812e00b9f19dfe0cdb3999b092ccb8ca168c0db5b8ea712456fd9b3", size = 363333771, upload-time = "2024-06-18T19:28:09.881Z" }, @@ -2069,17 +7318,95 @@ wheels = [ [[package]] name = "nvidia-cublas-cu12" -version = "12.8.3.14" +version = "12.8.4.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') 
or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/63/684a6f72f52671ea222c12ecde9bdf748a0ba025e2ad3ec374e466c26eb6/nvidia_cublas_cu12-12.8.3.14-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:93a4e0e386cc7f6e56c822531396de8170ed17068a1e18f987574895044cd8c3", size = 604900717, upload-time = "2025-01-23T17:52:55.486Z" }, - { url = "https://files.pythonhosted.org/packages/82/df/4b01f10069e23c641f116c62fc31e31e8dc361a153175d81561d15c8143b/nvidia_cublas_cu12-12.8.3.14-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:3f0e05e7293598cf61933258b73e66a160c27d59c4422670bf0b79348c04be44", size = 609620630, upload-time = "2025-01-23T17:55:00.753Z" }, - { url = "https://files.pythonhosted.org/packages/6c/54/fbfa3315b936d3358517f7da5f9f2557c279bf210e5261f0cf66cc0f9832/nvidia_cublas_cu12-12.8.3.14-py3-none-win_amd64.whl", hash = "sha256:9ae5eae500aead01fc4bdfc458209df638b1a3551557ce11a78eea9ece602ae9", size = 578387959, upload-time = "2025-01-23T18:08:00.662Z" }, + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' 
and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra 
!= 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/99/db44d685f0e257ff0e213ade1964fc459b4a690a73293220e98feb3307cf/nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:b86f6dd8935884615a0683b663891d43781b819ac4f2ba2b0c9604676af346d0", size = 590537124, upload-time = "2025-03-07T01:43:53.556Z" }, + { url = "https://files.pythonhosted.org/packages/dc/61/e24b560ab2e2eaeb3c839129175fb330dfcfc29e5203196e5541a4c44682/nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:8ac4e771d5a348c551b2a426eda6193c19aa630236b418086020df5ba9667142", size = 594346921, upload-time = "2025-03-07T01:44:31.254Z" }, + { url = "https://files.pythonhosted.org/packages/70/61/7d7b3c70186fb651d0fbd35b01dbfc8e755f69fd58f817f3d0f642df20c3/nvidia_cublas_cu12-12.8.4.1-py3-none-win_amd64.whl", hash = "sha256:47e9b82132fa8d2b4944e708049229601448aaad7e6f296f630f2d1a32de35af", size = 567544208, upload-time = "2025-03-07T01:53:30.535Z" }, ] [[package]] @@ -2098,9 +7425,9 @@ name = "nvidia-cuda-cupti-cu12" version = "12.4.127" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and 
platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", ] wheels = [ { url = "https://files.pythonhosted.org/packages/93/b5/9fb3d00386d3361b03874246190dfec7b206fd74e6e287b26a8fcb359d95/nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:79279b35cf6f91da114182a5ce1864997fd52294a87a16179ce275773799458a", size = 12354556, upload-time = "2024-06-18T19:30:40.546Z" }, @@ -2110,17 +7437,95 @@ wheels = [ [[package]] name = "nvidia-cuda-cupti-cu12" -version = "12.8.57" +version = "12.8.90" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", -] -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/fe/53/458956a65283c55c22ba40a65745bbe9ff20c10b68ea241bc575e20c0465/nvidia_cuda_cupti_cu12-12.8.57-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ff154211724fd824e758ce176b66007b558eea19c9a5135fc991827ee147e317", size = 9526469, upload-time = "2025-01-23T17:47:33.104Z" }, - { url = "https://files.pythonhosted.org/packages/39/6f/3683ecf4e38931971946777d231c2df00dd5c1c4c2c914c42ad8f9f4dca6/nvidia_cuda_cupti_cu12-12.8.57-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8e0b2eb847de260739bee4a3f66fac31378f4ff49538ff527a38a01a9a39f950", size = 10237547, upload-time = "2025-01-23T17:47:56.863Z" }, - { url = "https://files.pythonhosted.org/packages/3f/2a/cabe033045427beb042b70b394ac3fd7cfefe157c965268824011b16af67/nvidia_cuda_cupti_cu12-12.8.57-py3-none-win_amd64.whl", hash = "sha256:bbed719c52a476958a74cfc42f2b95a3fd6b3fd94eb40134acc4601feb4acac3", size = 7002337, upload-time = "2025-01-23T18:04:35.34Z" }, + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == 
'3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/1f/b3bd73445e5cb342727fd24fe1f7b748f690b460acadc27ea22f904502c8/nvidia_cuda_cupti_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4412396548808ddfed3f17a467b104ba7751e6b58678a4b840675c56d21cf7ed", size = 9533318, upload-time = "2025-03-07T01:40:10.421Z" }, + { url = "https://files.pythonhosted.org/packages/f8/02/2adcaa145158bf1a8295d83591d22e4103dbfd821bcaf6f3f53151ca4ffa/nvidia_cuda_cupti_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea0cb07ebda26bb9b29ba82cda34849e73c166c18162d3913575b0c9db9a6182", size = 10248621, upload-time = "2025-03-07T01:40:21.213Z" }, + { url = "https://files.pythonhosted.org/packages/41/bc/83f5426095d93694ae39fe1311431b5d5a9bb82e48bf0dd8e19be2765942/nvidia_cuda_cupti_cu12-12.8.90-py3-none-win_amd64.whl", hash = "sha256:bb479dcdf7e6d4f8b0b01b115260399bf34154a1a2e9fe11c85c517d87efd98e", size = 7015759, upload-time = "2025-03-07T01:51:11.355Z" }, ] [[package]] @@ -2139,9 +7544,9 @@ name = "nvidia-cuda-nvrtc-cu12" version = "12.4.127" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine 
!= 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", ] wheels = [ { url = "https://files.pythonhosted.org/packages/77/aa/083b01c427e963ad0b314040565ea396f914349914c298556484f799e61b/nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0eedf14185e04b76aa05b1fea04133e59f465b6f960c0cbf4e37c3cb6b0ea198", size = 24133372, upload-time = "2024-06-18T19:32:00.576Z" }, @@ -2151,17 +7556,95 @@ wheels = [ [[package]] name = "nvidia-cuda-nvrtc-cu12" -version = "12.8.61" +version = "12.8.93" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/d4/22/32029d4583f7b19cfe75c84399cbcfd23f2aaf41c66fc8db4da460104fff/nvidia_cuda_nvrtc_cu12-12.8.61-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:a0fa9c2a21583105550ebd871bd76e2037205d56f33f128e69f6d2a55e0af9ed", 
size = 88024585, upload-time = "2025-01-23T17:50:10.722Z" }, - { url = "https://files.pythonhosted.org/packages/f1/98/29f98d57fc40d6646337e942d37509c6d5f8abe29012671f7a6eb9978ebe/nvidia_cuda_nvrtc_cu12-12.8.61-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b1f376bf58111ca73dde4fd4df89a462b164602e074a76a2c29c121ca478dcd4", size = 43097015, upload-time = "2025-01-23T17:49:44.331Z" }, - { url = "https://files.pythonhosted.org/packages/f8/5b/052d05aa068e4752415ad03bac58e852ea8bc17c9321e08546b3f261e47e/nvidia_cuda_nvrtc_cu12-12.8.61-py3-none-win_amd64.whl", hash = "sha256:9c8887bf5e5dffc441018ba8c5dc59952372a6f4806819e8c1f03d62637dbeea", size = 73567440, upload-time = "2025-01-23T18:05:51.036Z" }, + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and 
sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra 
!= 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 
's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 
'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/6b/32f747947df2da6994e999492ab306a903659555dddc0fbdeb9d71f75e52/nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:a7756528852ef889772a84c6cd89d41dfa74667e24cca16bb31f8f061e3e9994", size = 88040029, upload-time = 
"2025-03-07T01:42:13.562Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d1/e50d0acaab360482034b84b6e27ee83c6738f7d32182b987f9c7a4e32962/nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fc1fec1e1637854b4c0a65fb9a8346b51dd9ee69e61ebaccc82058441f15bce8", size = 43106076, upload-time = "2025-03-07T01:41:59.817Z" }, + { url = "https://files.pythonhosted.org/packages/45/51/52a3d84baa2136cc8df15500ad731d74d3a1114d4c123e043cb608d4a32b/nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-win_amd64.whl", hash = "sha256:7a4b6b2904850fe78e0bd179c4b655c404d4bb799ef03ddc60804247099ae909", size = 73586838, upload-time = "2025-03-07T01:52:13.483Z" }, ] [[package]] @@ -2180,9 +7663,15 @@ name = "nvidia-cuda-runtime-cu12" version = "12.4.127" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", ] wheels = [ { url = "https://files.pythonhosted.org/packages/a1/aa/b656d755f474e2084971e9a297def515938d56b466ab39624012070cb773/nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:961fe0e2e716a2a1d967aab7caee97512f71767f852f67432d572e36cb3a11f3", size = 894177, upload-time = "2024-06-18T19:32:52.877Z" }, @@ -2192,17 +7681,111 @@ wheels = [ [[package]] name = "nvidia-cuda-runtime-cu12" -version = "12.8.57" +version = "12.8.90" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", -] -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/cd/9d/e77ec4227e70c6006195bdf410370f2d0e5abfa2dc0d1d315cacd57c5c88/nvidia_cuda_runtime_cu12-12.8.57-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:534ccebd967b6a44292678fa5da4f00666029cb2ed07a79515ea41ef31fe3ec7", size = 965264, upload-time = "2025-01-23T17:47:11.759Z" }, - { url = "https://files.pythonhosted.org/packages/16/f6/0e1ef31f4753a44084310ba1a7f0abaf977ccd810a604035abb43421c057/nvidia_cuda_runtime_cu12-12.8.57-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:75342e28567340b7428ce79a5d6bb6ca5ff9d07b69e7ce00d2c7b4dc23eff0be", size = 954762, upload-time = "2025-01-23T17:47:22.21Z" }, - { url = "https://files.pythonhosted.org/packages/16/ee/52508c74bee2a3de8d59c6fd9af4ca2f216052fa2bc916da3a6a7bb998af/nvidia_cuda_runtime_cu12-12.8.57-py3-none-win_amd64.whl", hash = "sha256:89be637e3ee967323865b85e0f147d75f9a5bd98360befa37481b02dd57af8f5", size = 944309, upload-time = "2025-01-23T18:04:23.143Z" }, + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 
'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and 
sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
< '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform 
!= 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' 
and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", +] +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7c/75/f865a3b236e4647605ea34cc450900854ba123834a5f1598e160b9530c3a/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:52bf7bbee900262ffefe5e9d5a2a69a30d97e2bc5bb6cc866688caa976966e3d", size = 965265, upload-time = "2025-03-07T01:39:43.533Z" }, + { url = "https://files.pythonhosted.org/packages/0d/9b/a997b638fcd068ad6e4d53b8551a7d30fe8b404d6f1804abf1df69838932/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adade8dcbd0edf427b7204d480d6066d33902cab2a4707dcfc48a2d0fd44ab90", size = 954765, upload-time = "2025-03-07T01:40:01.615Z" }, + { url = "https://files.pythonhosted.org/packages/30/a5/a515b7600ad361ea14bfa13fb4d6687abf500adc270f19e89849c0590492/nvidia_cuda_runtime_cu12-12.8.90-py3-none-win_amd64.whl", hash = "sha256:c0c6027f01505bfed6c3b21ec546f69c687689aad5f1a377554bc6ca4aa993a8", size = 944318, upload-time = "2025-03-07T01:51:01.794Z" }, ] [[package]] @@ -2210,12 +7793,383 @@ name = "nvidia-cuda-runtime-cu12" version = "12.9.79" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform 
!= 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine 
!= 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", 
+ "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' 
and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 
's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' 
and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and 
sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and 
sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
< '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform 
!= 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' 
and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra 
== 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= 
'3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' 
and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra 
!= 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= 
'3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' 
and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 
's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", ] wheels = [ { url = "https://files.pythonhosted.org/packages/bc/e0/0279bd94539fda525e0c8538db29b72a5a8495b0c12173113471d28bce78/nvidia_cuda_runtime_cu12-12.9.79-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:83469a846206f2a733db0c42e223589ab62fd2fabac4432d2f8802de4bded0a4", size = 3515012, upload-time = "2025-06-05T20:00:35.519Z" }, @@ -2228,7 +8182,7 @@ name = "nvidia-cudnn-cu11" version = "9.1.0.70" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-cublas-cu11", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or 
(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cublas-cu11", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/00/3b/0b776f04e364cd99e4cf152c2a9eadb5934c67c9a91429da55169a9447fd/nvidia_cudnn_cu11-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:e6135ac63fe9d5b0b89cfb35c3fc1c1349f2b995becadf2e9dc21bca89d9633d", size = 663919573, upload-time = "2024-04-22T15:20:24.839Z" }, @@ -2240,12 
+8194,12 @@ name = "nvidia-cudnn-cu12" version = "9.1.0.70" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", ] dependencies = [ - { name = "nvidia-cublas-cu12", version = "12.4.5.8", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cublas-cu12", version = "12.4.5.8", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and 
sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' 
and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/9f/fd/713452cd72343f682b1c7b9321e23829f00b842ceaedcda96e742ea0b0b3/nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f", size = 664752741, upload-time = "2024-04-22T15:24:15.253Z" }, @@ -2254,20 +8208,98 @@ wheels = [ [[package]] name = "nvidia-cudnn-cu12" -version = "9.7.1.26" +version = "9.19.0.56" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - 
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and 
sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version 
< '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and 
sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra 
!= 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 
's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", ] dependencies = [ - { name = "nvidia-cublas-cu12", version = "12.8.3.14", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') 
or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cublas-cu12", version = "12.8.4.1", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/2e/ec5dda717eeb1de3afbbbb611ca556f9d6d057470759c6abd36d72f0063b/nvidia_cudnn_cu12-9.7.1.26-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:848a61d40ef3b32bd4e1fadb599f0cf04a4b942fbe5fb3be572ad75f9b8c53ef", size = 725862213, upload-time = "2025-02-06T22:14:57.169Z" }, - { url = "https://files.pythonhosted.org/packages/25/dc/dc825c4b1c83b538e207e34f48f86063c88deaa35d46c651c7c181364ba2/nvidia_cudnn_cu12-9.7.1.26-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:6d011159a158f3cfc47bf851aea79e31bcff60d530b70ef70474c84cac484d07", size = 726851421, upload-time = "2025-02-06T22:18:29.812Z" }, - { url = "https://files.pythonhosted.org/packages/d0/ea/636cda41b3865caa0d43c34f558167304acde3d2c5f6c54c00a550e69ecd/nvidia_cudnn_cu12-9.7.1.26-py3-none-win_amd64.whl", hash = "sha256:7b805b9a4cf9f3da7c5f4ea4a9dff7baf62d1a612d6154a7e0d2ea51ed296241", size = 715962100, upload-time = "2025-02-06T22:21:32.431Z" }, + { url = "https://files.pythonhosted.org/packages/09/b8/277c51962ee46fa3e5b203ac5f76107c650f781d6891e681e28e6f3e9fe6/nvidia_cudnn_cu12-9.19.0.56-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:08caaf27fe556aca82a3ee3b5aa49a77e7de0cfcb7ff4e5c29da426387a8267e", size = 656910700, upload-time = "2026-02-03T20:40:25.508Z" }, + { url = "https://files.pythonhosted.org/packages/c5/41/65225d42fba06fb3dd3972485ea258e7dd07a40d6e01c95da6766ad87354/nvidia_cudnn_cu12-9.19.0.56-py3-none-manylinux_2_27_x86_64.whl", hash = 
"sha256:ac6ad90a075bb33a94f2b4cf4622eac13dd4dc65cf6dd9c7572a318516a36625", size = 657906812, upload-time = "2026-02-03T20:44:12.638Z" }, + { url = "https://files.pythonhosted.org/packages/a7/a5/48f07449fc9c6cc146dcafe6149fa5d69630137d2ec5b7d9e09f255fadd7/nvidia_cudnn_cu12-9.19.0.56-py3-none-win_amd64.whl", hash = "sha256:cec70596b9ce878fab83810c3f5a2e606d35f510e5fee579759e4cbc68a23750", size = 644003014, upload-time = "2026-02-03T20:46:25.768Z" }, ] [[package]] @@ -2286,12 +8318,12 @@ name = "nvidia-cufft-cu12" version = "11.2.1.3" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", ] dependencies = [ - { name = 
"nvidia-nvjitlink-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or 
(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-nvjitlink-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform 
!= 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform 
== 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/7a/8a/0e728f749baca3fbeffad762738276e5df60851958be7783af121a7221e7/nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_aarch64.whl", hash 
= "sha256:5dad8008fc7f92f5ddfa2101430917ce2ffacd86824914c82e28990ad7f00399", size = 211422548, upload-time = "2024-06-18T19:33:39.396Z" }, @@ -2301,29 +8333,107 @@ wheels = [ [[package]] name = "nvidia-cufft-cu12" -version = "11.3.3.41" +version = "11.3.3.83" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra 
!= 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= 
'3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra 
!= 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' 
and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra 
!= 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", ] dependencies = [ - { name = "nvidia-nvjitlink-cu12", version = "12.8.61", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-nvjitlink-cu12", version = "12.8.93", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') 
or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/72/95/6157cb45a49f5090a470de42353a22a0ed5b13077886dca891b4b0e350fe/nvidia_cufft_cu12-11.3.3.41-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:68509dcd7e3306e69d0e2d8a6d21c8b25ed62e6df8aac192ce752f17677398b5", size = 193108626, upload-time = "2025-01-23T17:55:49.192Z" }, - { url = "https://files.pythonhosted.org/packages/ac/26/b53c493c38dccb1f1a42e1a21dc12cba2a77fbe36c652f7726d9ec4aba28/nvidia_cufft_cu12-11.3.3.41-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:da650080ab79fcdf7a4b06aa1b460e99860646b176a43f6208099bdc17836b6a", size = 193118795, upload-time = "2025-01-23T17:56:30.536Z" }, - { url = "https://files.pythonhosted.org/packages/32/f3/f6248aa119c2726b1bdd02d472332cae274133bd32ca5fa8822efb0c308c/nvidia_cufft_cu12-11.3.3.41-py3-none-win_amd64.whl", hash = "sha256:f9760612886786601d27a0993bb29ce1f757e6b8b173499d0ecfa850d31b50f8", size = 192216738, upload-time = 
"2025-01-23T18:08:51.102Z" }, + { url = "https://files.pythonhosted.org/packages/60/bc/7771846d3a0272026c416fbb7e5f4c1f146d6d80704534d0b187dd6f4800/nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:848ef7224d6305cdb2a4df928759dca7b1201874787083b6e7550dd6765ce69a", size = 193109211, upload-time = "2025-03-07T01:44:56.873Z" }, + { url = "https://files.pythonhosted.org/packages/1f/13/ee4e00f30e676b66ae65b4f08cb5bcbb8392c03f54f2d5413ea99a5d1c80/nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d2dd21ec0b88cf61b62e6b43564355e5222e4a3fb394cac0db101f2dd0d4f74", size = 193118695, upload-time = "2025-03-07T01:45:27.821Z" }, + { url = "https://files.pythonhosted.org/packages/7d/ec/ce1629f1e478bb5ccd208986b5f9e0316a78538dd6ab1d0484f012f8e2a1/nvidia_cufft_cu12-11.3.3.83-py3-none-win_amd64.whl", hash = "sha256:7a64a98ef2a7c47f905aaf8931b69a3a43f27c55530c698bb2ed7c75c0b42cb7", size = 192216559, upload-time = "2025-03-07T01:53:57.106Z" }, ] [[package]] name = "nvidia-cufile-cu12" -version = "1.13.0.11" +version = "1.13.1.3" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/9c/1f3264d0a84c8a031487fb7f59780fc78fa6f1c97776233956780e3dc3ac/nvidia_cufile_cu12-1.13.0.11-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:483f434c541806936b98366f6d33caef5440572de8ddf38d453213729da3e7d4", size = 1197801, upload-time = "2025-01-23T17:57:07.247Z" }, - { url = "https://files.pythonhosted.org/packages/35/80/f6a0fc90ab6fa4ac916f3643e5b620fd19724626c59ae83b74f5efef0349/nvidia_cufile_cu12-1.13.0.11-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:2acbee65dc2eaf58331f0798c5e6bcdd790c4acb26347530297e63528c9eba5d", size = 1120660, upload-time = "2025-01-23T17:56:56.608Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/fe/1bcba1dfbfb8d01be8d93f07bfc502c93fa23afa6fd5ab3fc7c1df71038a/nvidia_cufile_cu12-1.13.1.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1d069003be650e131b21c932ec3d8969c1715379251f8d23a1860554b1cb24fc", size = 1197834, upload-time = "2025-03-07T01:45:50.723Z" }, + { url = "https://files.pythonhosted.org/packages/1e/f5/5607710447a6fe9fd9b3283956fceeee8a06cda1d2f56ce31371f595db2a/nvidia_cufile_cu12-1.13.1.3-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:4beb6d4cce47c1a0f1013d72e02b0994730359e17801d395bdcbf20cfb3bb00a", size = 1120705, upload-time = "2025-03-07T01:45:41.434Z" }, ] [[package]] @@ -2342,9 +8452,9 @@ name = "nvidia-curand-cu12" version = "10.3.5.147" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine 
!= 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", ] wheels = [ { url = "https://files.pythonhosted.org/packages/80/9c/a79180e4d70995fdf030c6946991d0171555c6edf95c265c6b2bf7011112/nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1f173f09e3e3c76ab084aba0de819c49e56614feae5c12f69883f4ae9bb5fad9", size = 56314811, upload-time = "2024-06-18T19:34:48.575Z" }, @@ -2354,17 +8464,95 @@ wheels = [ [[package]] name = "nvidia-curand-cu12" -version = "10.3.9.55" +version = "10.3.9.90" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/13/bbcf48e2f8a6a9adef58f130bc968810528a4e66bbbe62fad335241e699f/nvidia_curand_cu12-10.3.9.55-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:b6bb90c044fa9b07cedae2ef29077c4cf851fb6fdd6d862102321f359dca81e9", size = 63623836, upload-time = "2025-01-23T17:57:22.319Z" }, - { url = "https://files.pythonhosted.org/packages/bd/fc/7be5d0082507269bb04ac07cc614c84b78749efb96e8cf4100a8a1178e98/nvidia_curand_cu12-10.3.9.55-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:8387d974240c91f6a60b761b83d4b2f9b938b7e0b9617bae0f0dafe4f5c36b86", size = 63618038, upload-time = "2025-01-23T17:57:41.838Z" }, - { url = 
"https://files.pythonhosted.org/packages/d6/f0/91252f3cffe3f3c233a8e17262c21b41534652edfe783c1e58ea1c92c115/nvidia_curand_cu12-10.3.9.55-py3-none-win_amd64.whl", hash = "sha256:570d82475fe7f3d8ed01ffbe3b71796301e0e24c98762ca018ff8ce4f5418e1f", size = 62761446, upload-time = "2025-01-23T18:09:21.663Z" }, + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra 
!= 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 
's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 
's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/5e/92aa15eca622a388b80fbf8375d4760738df6285b1e92c43d37390a33a9a/nvidia_curand_cu12-10.3.9.90-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:dfab99248034673b779bc6decafdc3404a8a6f502462201f2f31f11354204acd", size = 63625754, upload-time = "2025-03-07T01:46:10.735Z" }, + { url = "https://files.pythonhosted.org/packages/fb/aa/6584b56dc84ebe9cf93226a5cde4d99080c8e90ab40f0c27bda7a0f29aa1/nvidia_curand_cu12-10.3.9.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:b32331d4f4df5d6eefa0554c565b626c7216f87a06a4f56fab27c3b68a830ec9", size = 63619976, upload-time = "2025-03-07T01:46:23.323Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/75/70c05b2f3ed5be3bb30b7102b6eb78e100da4bbf6944fd6725c012831cab/nvidia_curand_cu12-10.3.9.90-py3-none-win_amd64.whl", hash = "sha256:f149a8ca457277da854f89cf282d6ef43176861926c7ac85b2a0fbd237c587ec", size = 62765309, upload-time = "2025-03-07T01:54:20.478Z" }, ] [[package]] @@ -2372,7 +8560,7 @@ name = "nvidia-cusolver-cu11" version = "11.4.1.48" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-cublas-cu11", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or 
(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cublas-cu11", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 
'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/55/ee/939ff0104991dd7bdabb4c9767994c612ba0e1c9a55672a1ddd42f5e5b16/nvidia_cusolver_cu11-11.4.1.48-py3-none-manylinux1_x86_64.whl", hash = "sha256:ca538f545645b7e6629140786d3127fe067b3d5a085bd794cde5bfe877c8926f", size = 128240842, upload-time = "2022-10-03T23:30:24.348Z" }, @@ -2386,14 +8574,14 @@ name = "nvidia-cusolver-cu12" version = "11.6.1.9" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= 
'3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", ] dependencies = [ - { name = "nvidia-cublas-cu12", version = "12.4.5.8", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or 
(sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cusparse-cu12", version = "12.3.1.170", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') 
or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-nvjitlink-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cublas-cu12", version = "12.4.5.8", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 
'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or 
(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cusparse-cu12", version = "12.3.1.170", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 
'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' 
and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') 
or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or 
(extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') 
or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-nvjitlink-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra 
== 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/46/6b/a5c33cf16af09166845345275c34ad2190944bcc6026797a39f8e0a282e0/nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_aarch64.whl", hash = "sha256:d338f155f174f90724bbde3758b7ac375a70ce8e706d70b018dd3375545fc84e", size = 127634111, upload-time = "2024-06-18T19:35:01.793Z" }, @@ -2403,22 +8591,100 @@ wheels = [ [[package]] name = "nvidia-cusolver-cu12" -version = "11.7.2.55" +version = "11.7.3.90" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 
's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 
'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", ] dependencies = [ - { name = "nvidia-cublas-cu12", version = "12.8.3.14", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cusparse-cu12", version = "12.5.7.53", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' 
and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-nvjitlink-cu12", version = "12.8.61", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cublas-cu12", version = "12.8.4.1", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cusparse-cu12", version = "12.5.8.93", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128') or 
(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-nvjitlink-cu12", version = "12.8.93", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/8c/ce/4214a892e804b20bf66d04f04a473006fc2d3dac158160ef85f1bc906639/nvidia_cusolver_cu12-11.7.2.55-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:0fd9e98246f43c15bee5561147ad235dfdf2d037f5d07c9d41af3f7f72feb7cc", size = 260094827, upload-time = "2025-01-23T17:58:17.586Z" }, - { url = "https://files.pythonhosted.org/packages/c2/08/953675873a136d96bb12f93b49ba045d1107bc94d2551c52b12fa6c7dec3/nvidia_cusolver_cu12-11.7.2.55-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:4d1354102f1e922cee9db51920dba9e2559877cf6ff5ad03a00d853adafb191b", size = 260373342, upload-time = "2025-01-23T17:58:56.406Z" }, - { url = "https://files.pythonhosted.org/packages/c4/f9/e0e6f8b7aecd13e0f9e937d116fb3211329a0a92b9bea9624b1368de307a/nvidia_cusolver_cu12-11.7.2.55-py3-none-win_amd64.whl", hash = "sha256:a5a516c55da5c5aba98420d9bc9bcab18245f21ec87338cc1f930eb18dd411ac", size = 249600787, upload-time = "2025-01-23T18:10:07.641Z" }, + { url = "https://files.pythonhosted.org/packages/c8/32/f7cd6ce8a7690544d084ea21c26e910a97e077c9b7f07bf5de623ee19981/nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:db9ed69dbef9715071232caa9b69c52ac7de3a95773c2db65bdba85916e4e5c0", size = 267229841, upload-time = "2025-03-07T01:46:54.356Z" }, + { url = "https://files.pythonhosted.org/packages/85/48/9a13d2975803e8cf2777d5ed57b87a0b6ca2cc795f9a4f59796a910bfb80/nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:4376c11ad263152bd50ea295c05370360776f8c3427b30991df774f9fb26c450", size = 267506905, 
upload-time = "2025-03-07T01:47:16.273Z" }, + { url = "https://files.pythonhosted.org/packages/13/c0/76ca8551b8a84146ffa189fec81c26d04adba4bc0dbe09cd6e6fd9b7de04/nvidia_cusolver_cu12-11.7.3.90-py3-none-win_amd64.whl", hash = "sha256:4a550db115fcabc4d495eb7d39ac8b58d4ab5d8e63274d3754df1c0ad6a22d34", size = 256720438, upload-time = "2025-03-07T01:54:39.898Z" }, ] [[package]] @@ -2437,12 +8703,12 @@ name = "nvidia-cusparse-cu12" version = "12.3.1.170" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", ] dependencies = [ - { name = "nvidia-nvjitlink-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, 
marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-nvjitlink-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' 
and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/96/a9/c0d2f83a53d40a4a41be14cea6a0bf9e668ffcf8b004bd65633f433050c0/nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_aarch64.whl", hash = "sha256:9d32f62896231ebe0480efd8a7f702e143c98cfaa0e8a76df3386c1ba2b54df3", size = 207381987, upload-time = "2024-06-18T19:35:32.989Z" }, @@ -2452,20 
+8718,98 @@ wheels = [ [[package]] name = "nvidia-cusparse-cu12" -version = "12.5.7.53" +version = "12.5.8.93" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and 
sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra 
!= 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 
's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 
'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", ] dependencies = [ - { name = "nvidia-nvjitlink-cu12", version = "12.8.61", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 
'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-nvjitlink-cu12", version = "12.8.93", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra 
== 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/a2/313db0453087f5324a5900380ca2e57e050c8de76f407b5e11383dc762ae/nvidia_cusparse_cu12-12.5.7.53-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d869c6146ca80f4305b62e02d924b4aaced936f8173e3cef536a67eed2a91af1", size = 291963692, upload-time = "2025-01-23T17:59:40.325Z" }, - { url = "https://files.pythonhosted.org/packages/c2/ab/31e8149c66213b846c082a3b41b1365b831f41191f9f40c6ddbc8a7d550e/nvidia_cusparse_cu12-12.5.7.53-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3c1b61eb8c85257ea07e9354606b26397612627fdcd327bfd91ccf6155e7c86d", size = 292064180, upload-time = "2025-01-23T18:00:23.233Z" }, - { url = "https://files.pythonhosted.org/packages/7c/48/64b01653919a3d1d9b5117c156806ab0db8312c7496ff646477a5c1545bf/nvidia_cusparse_cu12-12.5.7.53-py3-none-win_amd64.whl", hash = "sha256:82c201d6781bacf6bb7c654f0446728d0fe596dfdd82ef4a04c204ce3e107441", size = 288767123, upload-time = "2025-01-23T18:11:01.543Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/f7/cd777c4109681367721b00a106f491e0d0d15cfa1fd59672ce580ce42a97/nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b6c161cb130be1a07a27ea6923df8141f3c295852f4b260c65f18f3e0a091dc", size = 288117129, upload-time = "2025-03-07T01:47:40.407Z" }, + { url = "https://files.pythonhosted.org/packages/c2/f5/e1854cb2f2bcd4280c44736c93550cc300ff4b8c95ebe370d0aa7d2b473d/nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ec05d76bbbd8b61b06a80e1eaf8cf4959c3d4ce8e711b65ebd0443bb0ebb13b", size = 288216466, upload-time = "2025-03-07T01:48:13.779Z" }, + { url = "https://files.pythonhosted.org/packages/62/07/f3b2ad63f8e3d257a599f422ae34eb565e70c41031aecefa3d18b62cabd1/nvidia_cusparse_cu12-12.5.8.93-py3-none-win_amd64.whl", hash = "sha256:9a33604331cb2cac199f2e7f5104dfbb8a5a898c367a53dfda9ff2acb6b6b4dd", size = 284937404, upload-time = "2025-03-07T01:55:07.742Z" }, ] [[package]] @@ -2473,9 +8817,9 @@ name = "nvidia-cusparselt-cu12" version = "0.6.2" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 
'linux')", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", ] wheels = [ { url = "https://files.pythonhosted.org/packages/98/8e/675498726c605c9441cf46653bd29cb1b8666da1fb1469ffa25f67f20c58/nvidia_cusparselt_cu12-0.6.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:067a7f6d03ea0d4841c85f0c6f1991c5dda98211f6302cb83a4ab234ee95bef8", size = 149422781, upload-time = "2024-07-23T17:35:27.203Z" }, @@ -2485,17 +8829,95 @@ wheels = [ [[package]] name = "nvidia-cusparselt-cu12" -version = "0.6.3" +version = "0.7.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/62/da/4de092c61c6dea1fc9c936e69308a02531d122e12f1f649825934ad651b5/nvidia_cusparselt_cu12-0.6.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8371549623ba601a06322af2133c4a44350575f5a3108fb75f3ef20b822ad5f1", size = 156402859, upload-time = "2024-10-16T02:23:17.184Z" }, - { url = 
"https://files.pythonhosted.org/packages/3b/9a/72ef35b399b0e183bc2e8f6f558036922d453c4d8237dab26c666a04244b/nvidia_cusparselt_cu12-0.6.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:e5c8a26c36445dd2e6812f1177978a24e2d37cacce7e090f297a688d1ec44f46", size = 156785796, upload-time = "2024-10-15T21:29:17.709Z" }, - { url = "https://files.pythonhosted.org/packages/46/3e/9e1e394a02a06f694be2c97bbe47288bb7c90ea84c7e9cf88f7b28afe165/nvidia_cusparselt_cu12-0.6.3-py3-none-win_amd64.whl", hash = "sha256:3b325bcbd9b754ba43df5a311488fca11a6b5dc3d11df4d190c000cf1a0765c7", size = 155595972, upload-time = "2024-10-15T22:58:35.426Z" }, + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra 
!= 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/73/b9/598f6ff36faaece4b3c50d26f50e38661499ff34346f00e057760b35cc9d/nvidia_cusparselt_cu12-0.7.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8878dce784d0fac90131b6817b607e803c36e629ba34dc5b433471382196b6a5", size = 283835557, upload-time = "2025-02-26T00:16:54.265Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/79/12978b96bd44274fe38b5dde5cfb660b1d114f70a65ef962bcbbed99b549/nvidia_cusparselt_cu12-0.7.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f1bb701d6b930d5a7cea44c19ceb973311500847f81b634d802b7b539dc55623", size = 287193691, upload-time = "2025-02-26T00:15:44.104Z" }, + { url = "https://files.pythonhosted.org/packages/2f/d8/a6b0d0d0c2435e9310f3e2bb0d9c9dd4c33daef86aa5f30b3681defd37ea/nvidia_cusparselt_cu12-0.7.1-py3-none-win_amd64.whl", hash = "sha256:f67fbb5831940ec829c9117b7f33807db9f9678dc2a617fbe781cac17b4e1075", size = 271020911, upload-time = "2025-02-26T00:14:47.204Z" }, ] [[package]] @@ -2511,9 +8933,9 @@ name = "nvidia-nccl-cu12" version = "2.21.5" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 
'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", ] wheels = [ { url = "https://files.pythonhosted.org/packages/df/99/12cd266d6233f47d00daf3a72739872bdc10267d0383508b0b9c84a18bb6/nvidia_nccl_cu12-2.21.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:8579076d30a8c24988834445f8d633c697d42397e92ffc3f63fa26766d25e0a0", size = 188654414, upload-time = "2024-04-03T15:32:57.427Z" }, @@ -2521,16 +8943,94 @@ wheels = [ [[package]] name = "nvidia-nccl-cu12" -version = "2.26.2" +version = "2.28.9" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/69/5b/ca2f213f637305633814ae8c36b153220e40a07ea001966dcd87391f3acb/nvidia_nccl_cu12-2.26.2-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5c196e95e832ad30fbbb50381eb3cbd1fadd5675e587a548563993609af19522", size = 291671495, upload-time = "2025-03-13T00:30:07.805Z" }, - { url = "https://files.pythonhosted.org/packages/67/ca/f42388aed0fddd64ade7493dbba36e1f534d4e6fdbdd355c6a90030ae028/nvidia_nccl_cu12-2.26.2-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:694cf3879a206553cc9d7dbda76b13efaf610fdb70a50cba303de1b0d1530ac6", size = 201319755, upload-time = "2025-03-13T00:29:55.296Z" }, + "python_full_version >= '3.12' and platform_machine == 'aarch64' 
and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra 
!= 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
< '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version 
< '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and 
sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra 
!= 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/c4/120d2dfd92dff2c776d68f361ff8705fdea2ca64e20b612fab0fd3f581ac/nvidia_nccl_cu12-2.28.9-py3-none-manylinux_2_18_aarch64.whl", hash = "sha256:50a36e01c4a090b9f9c47d92cec54964de6b9fcb3362d0e19b8ffc6323c21b60", size = 296766525, upload-time = "2025-11-18T05:49:16.094Z" }, + { url = "https://files.pythonhosted.org/packages/4a/4e/44dbb46b3d1b0ec61afda8e84837870f2f9ace33c564317d59b70bc19d3e/nvidia_nccl_cu12-2.28.9-py3-none-manylinux_2_18_x86_64.whl", hash = "sha256:485776daa8447da5da39681af455aa3b2c2586ddcf4af8772495e7c532c7e5ab", size = 296782137, upload-time = "2025-11-18T05:49:34.248Z" }, ] [[package]] @@ -2538,9 +9038,9 @@ name = "nvidia-nvjitlink-cu12" version = "12.4.127" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - 
"(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", ] wheels = [ { url = "https://files.pythonhosted.org/packages/02/45/239d52c05074898a80a900f49b1615d81c07fceadd5ad6c4f86a987c0bc4/nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:4abe7fef64914ccfa909bc2ba39739670ecc9e820c83ccc7a6ed414122599b83", size = 20552510, upload-time = "2024-06-18T20:20:13.871Z" }, @@ -2550,17 +9050,104 @@ wheels = [ [[package]] name = "nvidia-nvjitlink-cu12" -version = "12.8.61" +version = "12.8.93" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < 
'3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/03/f8/9d85593582bd99b8d7c65634d2304780aefade049b2b94d96e44084be90b/nvidia_nvjitlink_cu12-12.8.61-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:45fd79f2ae20bd67e8bc411055939049873bfd8fac70ff13bd4865e0b9bdab17", size = 39243473, upload-time = "2025-01-23T18:03:03.509Z" }, - { url = "https://files.pythonhosted.org/packages/af/53/698f3758f48c5fcb1112721e40cc6714da3980d3c7e93bae5b29dafa9857/nvidia_nvjitlink_cu12-12.8.61-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b80ecab31085dda3ce3b41d043be0ec739216c3fc633b8abe212d5a30026df0", size = 38374634, upload-time = "2025-01-23T18:02:35.812Z" }, - { url = "https://files.pythonhosted.org/packages/7f/c6/0d1b2bfeb2ef42c06db0570c4d081e5cde4450b54c09e43165126cfe6ff6/nvidia_nvjitlink_cu12-12.8.61-py3-none-win_amd64.whl", hash = "sha256:1166a964d25fdc0eae497574d38824305195a5283324a21ccb0ce0c802cbf41c", size = 268514099, upload-time = "2025-01-23T18:12:33.874Z" }, + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine 
!= 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == 
'3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/74/86a07f1d0f42998ca31312f998bd3b9a7eff7f52378f4f270c8679c77fb9/nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:81ff63371a7ebd6e6451970684f916be2eab07321b73c9d244dc2b4da7f73b88", size = 39254836, upload-time = "2025-03-07T01:49:55.661Z" }, + { url = "https://files.pythonhosted.org/packages/2a/a2/8cee5da30d13430e87bf99bb33455d2724d0a4a9cb5d7926d80ccb96d008/nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:adccd7161ace7261e01bb91e44e88da350895c270d23f744f0820c818b7229e7", size = 38386204, upload-time = "2025-03-07T01:49:43.612Z" }, + { url = "https://files.pythonhosted.org/packages/ed/d7/34f02dad2e30c31b10a51f6b04e025e5dd60e5f936af9045a9b858a05383/nvidia_nvjitlink_cu12-12.8.93-py3-none-win_amd64.whl", hash = "sha256:bd93fbeeee850917903583587f4fc3a4eafa022e34572251368238ab5e6bd67f", size = 268553710, upload-time = "2025-03-07T01:56:24.13Z" }, +] + +[[package]] +name = "nvidia-nvshmem-cu12" +version = "3.4.5" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/6a/03aa43cc9bd3ad91553a88b5f6fb25ed6a3752ae86ce2180221962bc2aa5/nvidia_nvshmem_cu12-3.4.5-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0b48363fc6964dede448029434c6abed6c5e37f823cb43c3bcde7ecfc0457e15", size = 138936938, upload-time = "2025-09-06T00:32:05.589Z" }, + { url = "https://files.pythonhosted.org/packages/b5/09/6ea3ea725f82e1e76684f0708bbedd871fc96da89945adeba65c3835a64c/nvidia_nvshmem_cu12-3.4.5-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:042f2500f24c021db8a06c5eec2539027d57460e1c1a762055a6554f72c369bd", size = 139103095, upload-time = "2025-09-06T00:32:31.266Z" }, ] [[package]] @@ -2579,9 +9166,9 @@ name = "nvidia-nvtx-cu12" version = "12.4.127" source = { 
registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", ] wheels = [ { url = "https://files.pythonhosted.org/packages/06/39/471f581edbb7804b39e8063d92fc8305bdc7a80ae5c07dbe6ea5c50d14a5/nvidia_nvtx_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7959ad635db13edf4fc65c06a6e9f9e55fc2f92596db928d169c0bb031e88ef3", size = 100417, upload-time = "2024-06-18T20:16:22.484Z" }, @@ -2591,17 +9178,95 @@ wheels = [ [[package]] name = "nvidia-nvtx-cu12" -version = "12.8.55" +version = "12.8.90" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and 
sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/e8/ae6ecbdade8bb9174d75db2b302c57c1c27d9277d6531c62aafde5fb32a3/nvidia_nvtx_cu12-12.8.55-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c38405335fbc0f0bf363eaeaeb476e8dfa8bae82fada41d25ace458b9ba9f3db", size = 91103, upload-time = "2025-01-23T17:50:24.664Z" }, - { url = "https://files.pythonhosted.org/packages/8d/cd/0e8c51b2ae3a58f054f2e7fe91b82d201abfb30167f2431e9bd92d532f42/nvidia_nvtx_cu12-12.8.55-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2dd0780f1a55c21d8e06a743de5bd95653de630decfff40621dbde78cc307102", size = 89896, upload-time = "2025-01-23T17:50:44.487Z" }, - { url = "https://files.pythonhosted.org/packages/e5/14/84d46e62bfde46dd20cfb041e0bb5c2ec454fd6a384696e7fa3463c5bb59/nvidia_nvtx_cu12-12.8.55-py3-none-win_amd64.whl", hash = "sha256:9022681677aef1313458f88353ad9c0d2fbbe6402d6b07c9f00ba0e3ca8774d3", size = 56435, upload-time = "2025-01-23T18:06:06.268Z" }, + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
< '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version 
< '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/c0/1b303feea90d296f6176f32a2a70b5ef230f9bdeb3a72bddb0dc922dc137/nvidia_nvtx_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d7ad891da111ebafbf7e015d34879f7112832fc239ff0d7d776b6cb685274615", size = 91161, upload-time = "2025-03-07T01:42:23.922Z" }, + { url = "https://files.pythonhosted.org/packages/a2/eb/86626c1bbc2edb86323022371c39aa48df6fd8b0a1647bc274577f72e90b/nvidia_nvtx_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b17e2001cc0d751a5bc2c6ec6d26ad95913324a4adb86788c944f8ce9ba441f", size = 89954, upload-time = "2025-03-07T01:42:44.131Z" }, + { url = "https://files.pythonhosted.org/packages/9f/99/4c9c0c329bf9fc125008c3b54c7c94c0023518d06fc025ae36431375e1fe/nvidia_nvtx_cu12-12.8.90-py3-none-win_amd64.whl", hash = "sha256:619c8304aedc69f02ea82dd244541a83c3d9d40993381b3b590f1adaed3db41e", size = 56492, upload-time = "2025-03-07T01:52:24.69Z" }, ] [[package]] @@ -2619,60 +9284,65 @@ wheels = [ [[package]] name = "onnx" -version = "1.18.0" +version = "1.21.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy" }, + { name = "ml-dtypes" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= 
'3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or 
(python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra 
== 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and 
extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "protobuf" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3d/60/e56e8ec44ed34006e6d4a73c92a04d9eea6163cc12440e35045aec069175/onnx-1.18.0.tar.gz", hash = "sha256:3d8dbf9e996629131ba3aa1afd1d8239b660d1f830c6688dd7e03157cccd6b9c", size = 12563009, upload-time = "2025-05-12T22:03:09.626Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c5/93/942d2a0f6a70538eea042ce0445c8aefd46559ad153469986f29a743c01c/onnx-1.21.0.tar.gz", hash = "sha256:4d8b67d0aaec5864c87633188b91cc520877477ec0254eda122bef8be43cd764", size = 12074608, upload-time = "2026-03-27T21:33:36.118Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/8e/e3/ab8a09c0af43373e0422de461956a1737581325260659aeffae22a7dad18/onnx-1.18.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:4a3b50d94620e2c7c1404d1d59bc53e665883ae3fecbd856cc86da0639fd0fc3", size = 18280145, upload-time = "2025-05-12T22:01:49.875Z" }, - { url = "https://files.pythonhosted.org/packages/04/5b/3cfd183961a0a872fe29c95f8d07264890ec65c75c94b99a4dabc950df29/onnx-1.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e189652dad6e70a0465035c55cc565c27aa38803dd4f4e74e4b952ee1c2de94b", size = 17422721, upload-time = "2025-05-12T22:01:52.841Z" }, - { url = "https://files.pythonhosted.org/packages/58/52/fa649429016c5790f68c614cdebfbefd3e72ba1c458966305297d540f713/onnx-1.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfb1f271b1523b29f324bfd223f6a4cfbdc5a2f2f16e73563671932d33663365", size = 17584220, upload-time = "2025-05-12T22:01:56.458Z" }, - { url = "https://files.pythonhosted.org/packages/42/52/dc166de41a5f72738b0bdfb2a19e0ebe4743cf3ecc9ae381ea3425bcb332/onnx-1.18.0-cp310-cp310-win32.whl", hash = "sha256:e03071041efd82e0317b3c45433b2f28146385b80f26f82039bc68048ac1a7a0", size = 15734494, upload-time = "2025-05-12T22:01:59.704Z" }, - { url = "https://files.pythonhosted.org/packages/a6/f9/e766a3b85b7651ddfc5f9648e0e9dc24e88b7e88ea7f8c23187530e818ea/onnx-1.18.0-cp310-cp310-win_amd64.whl", hash = "sha256:9235b3493951e11e75465d56f4cd97e3e9247f096160dd3466bfabe4cbc938bc", size = 15848421, upload-time = "2025-05-12T22:02:03.01Z" }, - { url = "https://files.pythonhosted.org/packages/ed/3a/a336dac4db1eddba2bf577191e5b7d3e4c26fcee5ec518a5a5b11d13540d/onnx-1.18.0-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:735e06d8d0cf250dc498f54038831401063c655a8d6e5975b2527a4e7d24be3e", size = 18281831, upload-time = "2025-05-12T22:02:06.429Z" }, - { url = 
"https://files.pythonhosted.org/packages/02/3a/56475a111120d1e5d11939acbcbb17c92198c8e64a205cd68e00bdfd8a1f/onnx-1.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73160799472e1a86083f786fecdf864cf43d55325492a9b5a1cfa64d8a523ecc", size = 17424359, upload-time = "2025-05-12T22:02:09.866Z" }, - { url = "https://files.pythonhosted.org/packages/cf/03/5eb5e9ef446ed9e78c4627faf3c1bc25e0f707116dd00e9811de232a8df5/onnx-1.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6acafb3823238bbe8f4340c7ac32fb218689442e074d797bee1c5c9a02fdae75", size = 17586006, upload-time = "2025-05-12T22:02:13.217Z" }, - { url = "https://files.pythonhosted.org/packages/b0/4e/70943125729ce453271a6e46bb847b4a612496f64db6cbc6cb1f49f41ce1/onnx-1.18.0-cp311-cp311-win32.whl", hash = "sha256:4c8c4bbda760c654e65eaffddb1a7de71ec02e60092d33f9000521f897c99be9", size = 15734988, upload-time = "2025-05-12T22:02:16.561Z" }, - { url = "https://files.pythonhosted.org/packages/44/b0/435fd764011911e8f599e3361f0f33425b1004662c1ea33a0ad22e43db2d/onnx-1.18.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5810194f0f6be2e58c8d6dedc6119510df7a14280dd07ed5f0f0a85bd74816a", size = 15849576, upload-time = "2025-05-12T22:02:19.569Z" }, - { url = "https://files.pythonhosted.org/packages/6c/f0/9e31f4b4626d60f1c034f71b411810bc9fafe31f4e7dd3598effd1b50e05/onnx-1.18.0-cp311-cp311-win_arm64.whl", hash = "sha256:aa1b7483fac6cdec26922174fc4433f8f5c2f239b1133c5625063bb3b35957d0", size = 15822961, upload-time = "2025-05-12T22:02:22.735Z" }, - { url = "https://files.pythonhosted.org/packages/a7/fe/16228aca685392a7114625b89aae98b2dc4058a47f0f467a376745efe8d0/onnx-1.18.0-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:521bac578448667cbb37c50bf05b53c301243ede8233029555239930996a625b", size = 18285770, upload-time = "2025-05-12T22:02:26.116Z" }, - { url = 
"https://files.pythonhosted.org/packages/1e/77/ba50a903a9b5e6f9be0fa50f59eb2fca4a26ee653375408fbc72c3acbf9f/onnx-1.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4da451bf1c5ae381f32d430004a89f0405bc57a8471b0bddb6325a5b334aa40", size = 17421291, upload-time = "2025-05-12T22:02:29.645Z" }, - { url = "https://files.pythonhosted.org/packages/11/23/25ec2ba723ac62b99e8fed6d7b59094dadb15e38d4c007331cc9ae3dfa5f/onnx-1.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99afac90b4cdb1471432203c3c1f74e16549c526df27056d39f41a9a47cfb4af", size = 17584084, upload-time = "2025-05-12T22:02:32.789Z" }, - { url = "https://files.pythonhosted.org/packages/6a/4d/2c253a36070fb43f340ff1d2c450df6a9ef50b938adcd105693fee43c4ee/onnx-1.18.0-cp312-cp312-win32.whl", hash = "sha256:ee159b41a3ae58d9c7341cf432fc74b96aaf50bd7bb1160029f657b40dc69715", size = 15734892, upload-time = "2025-05-12T22:02:35.527Z" }, - { url = "https://files.pythonhosted.org/packages/e8/92/048ba8fafe6b2b9a268ec2fb80def7e66c0b32ab2cae74de886981f05a27/onnx-1.18.0-cp312-cp312-win_amd64.whl", hash = "sha256:102c04edc76b16e9dfeda5a64c1fccd7d3d2913b1544750c01d38f1ac3c04e05", size = 15850336, upload-time = "2025-05-12T22:02:38.545Z" }, - { url = "https://files.pythonhosted.org/packages/a1/66/bbc4ffedd44165dcc407a51ea4c592802a5391ce3dc94aa5045350f64635/onnx-1.18.0-cp312-cp312-win_arm64.whl", hash = "sha256:911b37d724a5d97396f3c2ef9ea25361c55cbc9aa18d75b12a52b620b67145af", size = 15823802, upload-time = "2025-05-12T22:02:42.037Z" }, + { url = "https://files.pythonhosted.org/packages/a8/28/a14b1845bf9302c3a787221e8f37cde4e7f930e10d95a8e22dd910aeb41d/onnx-1.21.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:e0c21cc5c7a41d1a509828e2b14fe9c30e807c6df611ec0fd64a47b8d4b16abd", size = 17966899, upload-time = "2026-03-27T21:32:15.53Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/7b/788881bf022a4cfb7b0843782f88415ea51c805cee4a909dcf2e49bb8129/onnx-1.21.0-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1931bfcc222a4c9da6475f2ffffb84b97ab3876041ec639171c11ce802bee6a", size = 17534297, upload-time = "2026-03-27T21:32:18.343Z" }, + { url = "https://files.pythonhosted.org/packages/16/51/eb64d4f2ec6caa98909aab5fbcfa24be9c059081e804bbb0012cc549ef89/onnx-1.21.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9b56ad04039fac6b028c07e54afa1ec7f75dd340f65311f2c292e41ed7aa4d9", size = 17616697, upload-time = "2026-03-27T21:32:21Z" }, + { url = "https://files.pythonhosted.org/packages/d2/4e/6b1f7800dae3407dc850e7e59d591ed8c83e9b3401e4cd57a1f612e400c6/onnx-1.21.0-cp310-cp310-win32.whl", hash = "sha256:3abd09872523c7e0362d767e4e63bd7c6bac52a5e2c3edbf061061fe540e2027", size = 16288893, upload-time = "2026-03-27T21:32:23.864Z" }, + { url = "https://files.pythonhosted.org/packages/a2/a8/89273e581d3943e20314af19b1596ab4d763f9c2eb07d4eaf4fb0593219b/onnx-1.21.0-cp310-cp310-win_amd64.whl", hash = "sha256:f2c7c234c568402e10db74e33d787e4144e394ae2bcbbf11000fbfe2e017ad68", size = 16443416, upload-time = "2026-03-27T21:32:26.655Z" }, + { url = "https://files.pythonhosted.org/packages/45/48/32e383aa6bc40b72a9fd419937aaa647078190c9bfccdc97b316d2dee687/onnx-1.21.0-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:2aca19949260875c14866fc77ea0bc37e4e809b24976108762843d328c92d3ce", size = 17968053, upload-time = "2026-03-27T21:32:29.558Z" }, + { url = "https://files.pythonhosted.org/packages/e2/26/5726e8df7d36e96bb3c679912d1a86af42f393d77aa17d6b98a97d4289ce/onnx-1.21.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:82aa6ab51144df07c58c4850cb78d4f1ae969d8c0bf657b28041796d49ba6974", size = 17534821, upload-time = "2026-03-27T21:32:32.351Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/2b/021dcd2dd50c3c71b7959d7368526da384a295c162fb4863f36057973f78/onnx-1.21.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10c3185a232089335581fabb98fba4e86d3e8246b8140f2e406082438100ebda", size = 17616664, upload-time = "2026-03-27T21:32:34.921Z" }, + { url = "https://files.pythonhosted.org/packages/12/00/afa32a46fa122a7ed42df1cfe8796922156a3725ba8fc581c4779c96e2fc/onnx-1.21.0-cp311-cp311-win32.whl", hash = "sha256:f53b3c15a3b539c16b99655c43c365622046d68c49b680c48eba4da2a4fb6f27", size = 16289035, upload-time = "2026-03-27T21:32:37.783Z" }, + { url = "https://files.pythonhosted.org/packages/73/8d/483cc980a24d4c0131d0af06d0ff6a37fb08ae90a7848ece8cef645194f1/onnx-1.21.0-cp311-cp311-win_amd64.whl", hash = "sha256:5f78c411743db317a76e5d009f84f7e3d5380411a1567a868e82461a1e5c775d", size = 16443748, upload-time = "2026-03-27T21:32:40.337Z" }, + { url = "https://files.pythonhosted.org/packages/38/78/9d06fd5aaaed1ec9cb8a3b70fbbf00c1bdc18db610771e96379f0ed58112/onnx-1.21.0-cp311-cp311-win_arm64.whl", hash = "sha256:ab6a488dabbb172eebc9f3b3e7ac68763f32b0c571626d4a5004608f866cc83d", size = 16406123, upload-time = "2026-03-27T21:32:45.159Z" }, + { url = "https://files.pythonhosted.org/packages/7d/ae/cb644ec84c25e63575d9d8790fdcc5d1a11d67d3f62f872edb35fa38d158/onnx-1.21.0-cp312-abi3-macosx_12_0_universal2.whl", hash = "sha256:fc2635400fe39ff37ebc4e75342cc54450eadadf39c540ff132c319bf4960095", size = 17965930, upload-time = "2026-03-27T21:32:48.089Z" }, + { url = "https://files.pythonhosted.org/packages/6f/b6/eeb5903586645ef8a49b4b7892580438741acc3df91d7a5bd0f3a59ea9cb/onnx-1.21.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9003d5206c01fa2ff4b46311566865d8e493e1a6998d4009ec6de39843f1b59b", size = 17531344, upload-time = "2026-03-27T21:32:50.837Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/00/4823f06357892d1e60d6f34e7299d2ba4ed2108c487cc394f7ce85a3ff14/onnx-1.21.0-cp312-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a9261bd580fb8548c9c37b3c6750387eb8f21ea43c63880d37b2c622e1684285", size = 17613697, upload-time = "2026-03-27T21:32:54.222Z" }, + { url = "https://files.pythonhosted.org/packages/23/1d/391f3c567ae068c8ac4f1d1316bae97c9eb45e702f05975fe0e17ad441f0/onnx-1.21.0-cp312-abi3-win32.whl", hash = "sha256:9ea4e824964082811938a9250451d89c4ec474fe42dd36c038bfa5df31993d1e", size = 16287200, upload-time = "2026-03-27T21:32:57.277Z" }, + { url = "https://files.pythonhosted.org/packages/9c/a6/5eefbe5b40ea96de95a766bd2e0e751f35bdea2d4b951991ec9afaa69531/onnx-1.21.0-cp312-abi3-win_amd64.whl", hash = "sha256:458d91948ad9a7729a347550553b49ab6939f9af2cddf334e2116e45467dc61f", size = 16441045, upload-time = "2026-03-27T21:33:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/63/c4/0ed8dc037a39113d2a4d66e0005e07751c299c46b993f1ad5c2c35664c20/onnx-1.21.0-cp312-abi3-win_arm64.whl", hash = "sha256:ca14bc4842fccc3187eb538f07eabeb25a779b39388b006db4356c07403a7bbb", size = 16403134, upload-time = "2026-03-27T21:33:03.987Z" }, ] [[package]] name = "onnxruntime" -version = "1.22.0" +version = "1.22.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "coloredlogs" }, { name = "flatbuffers" }, - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') 
or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra 
== 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "packaging" }, { name = "protobuf" }, - { name = "sympy", version = "1.13.1", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, 
+ { name = "sympy", version = "1.13.1", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/67/3c/c99b21646a782b89c33cffd96fdee02a81bc43f0cb651de84d58ec11e30e/onnxruntime-1.22.0-cp310-cp310-macosx_13_0_universal2.whl", hash = "sha256:85d8826cc8054e4d6bf07f779dc742a363c39094015bdad6a08b3c18cfe0ba8c", size = 34273493, upload-time = "2025-05-09T20:25:55.66Z" }, - { url = "https://files.pythonhosted.org/packages/54/ab/fd9a3b5285008c060618be92e475337fcfbf8689787953d37273f7b52ab0/onnxruntime-1.22.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:468c9502a12f6f49ec335c2febd22fdceecc1e4cc96dfc27e419ba237dff5aff", size = 14445346, upload-time = "2025-05-09T20:25:41.322Z" }, - { url = "https://files.pythonhosted.org/packages/1f/ca/a5625644bc079e04e3076a5ac1fb954d1e90309b8eb987a4f800732ffee6/onnxruntime-1.22.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:681fe356d853630a898ee05f01ddb95728c9a168c9460e8361d0a240c9b7cb97", size = 16392959, upload-time = "2025-05-09T20:26:09.047Z" }, - { url = "https://files.pythonhosted.org/packages/6d/6b/8267490476e8d4dd1883632c7e46a4634384c7ff1c35ae44edc8ab0bb7a9/onnxruntime-1.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:20bca6495d06925631e201f2b257cc37086752e8fe7b6c83a67c6509f4759bc9", size = 12689974, upload-time = "2025-05-12T21:26:09.704Z" }, - { url = "https://files.pythonhosted.org/packages/7a/08/c008711d1b92ff1272f4fea0fbee57723171f161d42e5c680625535280af/onnxruntime-1.22.0-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:8d6725c5b9a681d8fe72f2960c191a96c256367887d076b08466f52b4e0991df", size = 34282151, upload-time = "2025-05-09T20:25:59.246Z" }, - { url = "https://files.pythonhosted.org/packages/3e/8b/22989f6b59bc4ad1324f07a945c80b9ab825f0a581ad7a6064b93716d9b7/onnxruntime-1.22.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fef17d665a917866d1f68f09edc98223b9a27e6cb167dec69da4c66484ad12fd", size = 14446302, upload-time = "2025-05-09T20:25:44.299Z" }, - { url = "https://files.pythonhosted.org/packages/7a/d5/aa83d084d05bc8f6cf8b74b499c77431ffd6b7075c761ec48ec0c161a47f/onnxruntime-1.22.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b978aa63a9a22095479c38371a9b359d4c15173cbb164eaad5f2cd27d666aa65", size = 16393496, upload-time = "2025-05-09T20:26:11.588Z" }, - { url = "https://files.pythonhosted.org/packages/89/a5/1c6c10322201566015183b52ef011dfa932f5dd1b278de8d75c3b948411d/onnxruntime-1.22.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:03d3ef7fb11adf154149d6e767e21057e0e577b947dd3f66190b212528e1db31", size = 12691517, upload-time = "2025-05-12T21:26:13.354Z" }, - { url = "https://files.pythonhosted.org/packages/4d/de/9162872c6e502e9ac8c99a98a8738b2fab408123d11de55022ac4f92562a/onnxruntime-1.22.0-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:f3c0380f53c1e72a41b3f4d6af2ccc01df2c17844072233442c3a7e74851ab97", size = 34298046, upload-time = "2025-05-09T20:26:02.399Z" }, - { url = "https://files.pythonhosted.org/packages/03/79/36f910cd9fc96b444b0e728bba14607016079786adf032dae61f7c63b4aa/onnxruntime-1.22.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8601128eaef79b636152aea76ae6981b7c9fc81a618f584c15d78d42b310f1c", size = 14443220, upload-time = "2025-05-09T20:25:47.078Z" }, - { url = "https://files.pythonhosted.org/packages/8c/60/16d219b8868cc8e8e51a68519873bdb9f5f24af080b62e917a13fff9989b/onnxruntime-1.22.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6964a975731afc19dc3418fad8d4e08c48920144ff590149429a5ebe0d15fb3c", size = 16406377, upload-time = "2025-05-09T20:26:14.478Z" }, - { url = "https://files.pythonhosted.org/packages/36/b4/3f1c71ce1d3d21078a6a74c5483bfa2b07e41a8d2b8fb1e9993e6a26d8d3/onnxruntime-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0d534a43d1264d1273c2d4f00a5a588fa98d21117a3345b7104fa0bbcaadb9a", size = 12692233, upload-time = "2025-05-12T21:26:16.963Z" }, + { url = "https://files.pythonhosted.org/packages/76/b9/664a1ffee62fa51529fac27b37409d5d28cadee8d97db806fcba68339b7e/onnxruntime-1.22.1-cp310-cp310-macosx_13_0_universal2.whl", hash = "sha256:80e7f51da1f5201c1379b8d6ef6170505cd800e40da216290f5e06be01aadf95", size = 34319864, upload-time = "2025-07-10T19:15:15.371Z" }, + { url = "https://files.pythonhosted.org/packages/b9/64/bc7221e92c994931024e22b22401b962c299e991558c3d57f7e34538b4b9/onnxruntime-1.22.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:b89ddfdbbdaf7e3a59515dee657f6515601d55cb21a0f0f48c81aefc54ff1b73", size = 14472246, upload-time = "2025-07-10T19:15:19.403Z" }, + { url = "https://files.pythonhosted.org/packages/84/57/901eddbfb59ac4d008822b236450d5765cafcd450c787019416f8d3baf11/onnxruntime-1.22.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bddc75868bcf6f9ed76858a632f65f7b1846bdcefc6d637b1e359c2c68609964", size = 16459905, upload-time = "2025-07-10T19:15:21.749Z" }, + { url = "https://files.pythonhosted.org/packages/de/90/d6a1eb9b47e66a18afe7d1cf7cf0b2ef966ffa6f44d9f32d94c2be2860fb/onnxruntime-1.22.1-cp310-cp310-win_amd64.whl", hash = "sha256:01e2f21b2793eb0c8642d2be3cee34cc7d96b85f45f6615e4e220424158877ce", size = 12689001, upload-time = "2025-07-10T19:15:23.848Z" }, + { url = "https://files.pythonhosted.org/packages/82/ff/4a1a6747e039ef29a8d4ee4510060e9a805982b6da906a3da2306b7a3be6/onnxruntime-1.22.1-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:f4581bccb786da68725d8eac7c63a8f31a89116b8761ff8b4989dc58b61d49a0", size = 34324148, upload-time = "2025-07-10T19:15:26.584Z" }, + { url = "https://files.pythonhosted.org/packages/0b/05/9f1929723f1cca8c9fb1b2b97ac54ce61362c7201434d38053ea36ee4225/onnxruntime-1.22.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7ae7526cf10f93454beb0f751e78e5cb7619e3b92f9fc3bd51aa6f3b7a8977e5", size = 14473779, upload-time = "2025-07-10T19:15:30.183Z" }, + { url = "https://files.pythonhosted.org/packages/59/f3/c93eb4167d4f36ea947930f82850231f7ce0900cb00e1a53dc4995b60479/onnxruntime-1.22.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f6effa1299ac549a05c784d50292e3378dbbf010346ded67400193b09ddc2f04", size = 16460799, upload-time = "2025-07-10T19:15:33.005Z" }, + { url = "https://files.pythonhosted.org/packages/a8/01/e536397b03e4462d3260aee5387e6f606c8fa9d2b20b1728f988c3c72891/onnxruntime-1.22.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:f28a42bb322b4ca6d255531bb334a2b3e21f172e37c1741bd5e66bc4b7b61f03", size = 12689881, upload-time = "2025-07-10T19:15:35.501Z" }, + { url = "https://files.pythonhosted.org/packages/48/70/ca2a4d38a5deccd98caa145581becb20c53684f451e89eb3a39915620066/onnxruntime-1.22.1-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:a938d11c0dc811badf78e435daa3899d9af38abee950d87f3ab7430eb5b3cf5a", size = 34342883, upload-time = "2025-07-10T19:15:38.223Z" }, + { url = "https://files.pythonhosted.org/packages/29/e5/00b099b4d4f6223b610421080d0eed9327ef9986785c9141819bbba0d396/onnxruntime-1.22.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:984cea2a02fcc5dfea44ade9aca9fe0f7a8a2cd6f77c258fc4388238618f3928", size = 14473861, upload-time = "2025-07-10T19:15:42.911Z" }, + { url = "https://files.pythonhosted.org/packages/0a/50/519828a5292a6ccd8d5cd6d2f72c6b36ea528a2ef68eca69647732539ffa/onnxruntime-1.22.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2d39a530aff1ec8d02e365f35e503193991417788641b184f5b1e8c9a6d5ce8d", size = 16475713, upload-time = "2025-07-10T19:15:45.452Z" }, + { url = "https://files.pythonhosted.org/packages/5d/54/7139d463bb0a312890c9a5db87d7815d4a8cce9e6f5f28d04f0b55fcb160/onnxruntime-1.22.1-cp312-cp312-win_amd64.whl", hash = "sha256:6a64291d57ea966a245f749eb970f4fa05a64d26672e05a83fdb5db6b7d62f87", size = 12690910, upload-time = "2025-07-10T19:15:47.478Z" }, ] [[package]] @@ -2680,24 +9350,206 @@ name = "onnxruntime-gpu" version = "1.20.1" source = { registry = "https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/onnxruntime-cuda-11/pypi/simple/" } resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", - "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 
'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and sys_platform == 'darwin'", - "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine 
!= 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and 
sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' 
and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == 
'3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", ] dependencies = [ { name = "coloredlogs" }, { name = "flatbuffers" }, - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or 
(python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and 
extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or 
(extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "packaging" }, { name = "protobuf" }, - { name = "sympy", version = "1.13.1", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "sympy", version = "1.13.1", source = { registry = "https://pypi.org/simple" }, marker = 
"(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or 
(extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ { url = "https://aiinfra.pkgs.visualstudio.com/2692857e-05ef-43b4-ba9c-ccf1c22c437c/_packaging/9c975526-0258-4aac-9e34-f8b3551decdd/pypi/download/onnxruntime-gpu/1.20.1/onnxruntime_gpu-1.20.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e3f9147b82aabe735208978345f31c1431a990586a262162602f643ad8ec4f92" }, @@ -2713,24 +9565,206 @@ name = "onnxruntime-gpu" version = "1.22.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", - "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and sys_platform == 'darwin'", - "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + 
"(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 
'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine 
!= 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == 
'3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' 
and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra 
!= 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra 
!= 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 
'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' 
and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", ] dependencies = [ { name = "coloredlogs" }, { name = "flatbuffers" }, - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= 
'3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra 
== 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') 
or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra 
== 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "packaging" }, { name = "protobuf" }, - { name = "sympy", version = "1.13.1", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "sympy", version = "1.13.1", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/27/76/81de592072d6a41553b1523e15447f0ef94392e8f4cb98fda42909f24f9b/onnxruntime_gpu-1.22.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:965da7d33a54917e8e5176f292cc22640819f328370f4fb86087908745b03708", size = 283205327, upload-time = "2025-05-09T19:39:24.231Z" }, @@ -2751,13 +9785,13 @@ resolution-markers = [ "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", ] dependencies = [ - { name = "coloredlogs", marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 
'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' 
and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "flatbuffers", marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "numpy", marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or 
(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform 
!= 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "packaging", marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or 
(sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "protobuf", marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "sympy", version = "1.13.1", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 
'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and 
extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "coloredlogs" }, + { name = "flatbuffers" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" } }, + { name = "packaging" }, + { name = "protobuf" }, + { name = "sympy", version = "1.13.1", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ { url = "https://pypi.jetson-ai-lab.io/jp6/cu126/+f/4eb/e6a8902dc7708/onnxruntime_gpu-1.23.0-cp310-cp310-linux_aarch64.whl", hash = "sha256:4ebe6a8902dc7708434b2e1541b3fe629ebf434e16ab5537d1d6a622b42c622b" }, @@ -2768,7 +9802,9 @@ name = "opencv-contrib-python" version = "4.11.0.86" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or 
(python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra 
== 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ef/51/3ceb85ecff5f26994b7aae2922b1aa38148dbfe88cab13d63bc6facbac88/opencv-contrib-python-4.11.0.86.tar.gz", hash = "sha256:4ff773dab44911da366b906621c9592d4eb96f6ad3777098933a23f064aab38e", size = 150559874, upload-time = "2025-01-16T13:53:08.425Z" } wheels = [ @@ -2784,8 +9820,25 @@ wheels = [ name = "opencv-python" version = "4.11.0.86" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin'", + 
"python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", +] dependencies = [ - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') 
or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/17/06/68c27a523103dad5837dc5b87e71285280c4f098c60e4fe8a8db6486ab09/opencv-python-4.11.0.86.tar.gz", hash = "sha256:03d60ccae62304860d232272e4a4fda93c39d595780cb40b161b310244b736a4", size = 95171956, upload-time = "2025-01-16T13:52:24.737Z" } wheels = [ @@ -2797,12 +9850,731 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/a4/7d/f1c30a92854540bf789e9cd5dde7ef49bbe63f855b85a2e6b3db8135c591/opencv_python-4.11.0.86-cp37-abi3-win_amd64.whl", hash = "sha256:085ad9b77c18853ea66283e98affefe2de8cc4c1f43eda4c100cf9b2721142ec", size = 39488044, upload-time = "2025-01-16T13:52:21.928Z" }, ] +[[package]] +name = "opencv-python" +version = "4.13.0.92" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 
'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 
's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 
'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 
'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 
'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine 
!= 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 
's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= 
'3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' 
and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' 
and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra 
!= 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine 
!= 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 
'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine 
!= 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and 
sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", +] +dependencies = [ + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or 
(python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra 
== 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/6f/5a28fef4c4a382be06afe3938c64cc168223016fa520c5abaf37e8862aa5/opencv_python-4.13.0.92-cp37-abi3-macosx_13_0_arm64.whl", hash = "sha256:caf60c071ec391ba51ed00a4a920f996d0b64e3e46068aac1f646b5de0326a19", size = 46247052, upload-time = "2026-02-05T07:01:25.046Z" }, + { url = "https://files.pythonhosted.org/packages/08/ac/6c98c44c650b8114a0fb901691351cfb3956d502e8e9b5cd27f4ee7fbf2f/opencv_python-4.13.0.92-cp37-abi3-macosx_14_0_x86_64.whl", hash = "sha256:5868a8c028a0b37561579bfb8ac1875babdc69546d236249fff296a8c010ccf9", size = 32568781, upload-time = "2026-02-05T07:01:41.379Z" }, + { url = "https://files.pythonhosted.org/packages/3e/51/82fed528b45173bf629fa44effb76dff8bc9f4eeaee759038362dfa60237/opencv_python-4.13.0.92-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0bc2596e68f972ca452d80f444bc404e08807d021fbba40df26b61b18e01838a", size = 47685527, upload-time = "2026-02-05T06:59:11.24Z" }, + { url = "https://files.pythonhosted.org/packages/db/07/90b34a8e2cf9c50fe8ed25cac9011cde0676b4d9d9c973751ac7616223a2/opencv_python-4.13.0.92-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:402033cddf9d294693094de5ef532339f14ce821da3ad7df7c9f6e8316da32cf", size = 
70460872, upload-time = "2026-02-05T06:59:19.162Z" }, + { url = "https://files.pythonhosted.org/packages/02/6d/7a9cc719b3eaf4377b9c2e3edeb7ed3a81de41f96421510c0a169ca3cfd4/opencv_python-4.13.0.92-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:bccaabf9eb7f897ca61880ce2869dcd9b25b72129c28478e7f2a5e8dee945616", size = 46708208, upload-time = "2026-02-05T06:59:15.419Z" }, + { url = "https://files.pythonhosted.org/packages/fd/55/b3b49a1b97aabcfbbd6c7326df9cb0b6fa0c0aefa8e89d500939e04aa229/opencv_python-4.13.0.92-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:620d602b8f7d8b8dab5f4b99c6eb353e78d3fb8b0f53db1bd258bb1aa001c1d5", size = 72927042, upload-time = "2026-02-05T06:59:23.389Z" }, + { url = "https://files.pythonhosted.org/packages/fb/17/de5458312bcb07ddf434d7bfcb24bb52c59635ad58c6e7c751b48949b009/opencv_python-4.13.0.92-cp37-abi3-win32.whl", hash = "sha256:372fe164a3148ac1ca51e5f3ad0541a4a276452273f503441d718fab9c5e5f59", size = 30932638, upload-time = "2026-02-05T07:02:14.98Z" }, + { url = "https://files.pythonhosted.org/packages/e9/a5/1be1516390333ff9be3a9cb648c9f33df79d5096e5884b5df71a588af463/opencv_python-4.13.0.92-cp37-abi3-win_amd64.whl", hash = "sha256:423d934c9fafb91aad38edf26efb46da91ffbc05f3f59c4b0c72e699720706f5", size = 40212062, upload-time = "2026-02-05T07:02:12.724Z" }, +] + [[package]] name = "opencv-python-headless" version = "4.11.0.86" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or 
(python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= 
'3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra 
== 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or 
(extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/36/2f/5b2b3ba52c864848885ba988f24b7f105052f68da9ab0e693cc7c25b0b30/opencv-python-headless-4.11.0.86.tar.gz", hash = "sha256:996eb282ca4b43ec6a3972414de0e2331f5d9cda2b41091a49739c19fb843798", size = 95177929, upload-time = "2025-01-16T13:53:40.22Z" } wheels = [ @@ -2825,11 +10597,11 @@ wheels = [ [[package]] name = "packaging" -version = "25.0" +version = "26.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, ] [[package]] @@ -2841,6 +10613,97 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/90/96/04b8e52da071d28f5e21a805b19cb9390aa17a47462ac87f5e2696b9566d/paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591", size = 13746, upload-time = "2024-08-25T14:17:22.55Z" }, ] +[[package]] +name = "pandas" +version = "2.3.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", +] +dependencies = [ + { name 
= "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130')" }, + { name = "python-dateutil", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130')" }, + { name = "pytz", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130')" }, + { name = "tzdata", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130')" }, +] +sdist = 
{ url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/f7/f425a00df4fcc22b292c6895c6831c0c8ae1d9fac1e024d16f98a9ce8749/pandas-2.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:376c6446ae31770764215a6c937f72d917f214b43560603cd60da6408f183b6c", size = 11555763, upload-time = "2025-09-29T23:16:53.287Z" }, + { url = "https://files.pythonhosted.org/packages/13/4f/66d99628ff8ce7857aca52fed8f0066ce209f96be2fede6cef9f84e8d04f/pandas-2.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e19d192383eab2f4ceb30b412b22ea30690c9e618f78870357ae1d682912015a", size = 10801217, upload-time = "2025-09-29T23:17:04.522Z" }, + { url = "https://files.pythonhosted.org/packages/1d/03/3fc4a529a7710f890a239cc496fc6d50ad4a0995657dccc1d64695adb9f4/pandas-2.3.3-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5caf26f64126b6c7aec964f74266f435afef1c1b13da3b0636c7518a1fa3e2b1", size = 12148791, upload-time = "2025-09-29T23:17:18.444Z" }, + { url = "https://files.pythonhosted.org/packages/40/a8/4dac1f8f8235e5d25b9955d02ff6f29396191d4e665d71122c3722ca83c5/pandas-2.3.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dd7478f1463441ae4ca7308a70e90b33470fa593429f9d4c578dd00d1fa78838", size = 12769373, upload-time = "2025-09-29T23:17:35.846Z" }, + { url = "https://files.pythonhosted.org/packages/df/91/82cc5169b6b25440a7fc0ef3a694582418d875c8e3ebf796a6d6470aa578/pandas-2.3.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4793891684806ae50d1288c9bae9330293ab4e083ccd1c5e383c34549c6e4250", size = 13200444, upload-time = "2025-09-29T23:17:49.341Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/ae/89b3283800ab58f7af2952704078555fa60c807fff764395bb57ea0b0dbd/pandas-2.3.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:28083c648d9a99a5dd035ec125d42439c6c1c525098c58af0fc38dd1a7a1b3d4", size = 13858459, upload-time = "2025-09-29T23:18:03.722Z" }, + { url = "https://files.pythonhosted.org/packages/85/72/530900610650f54a35a19476eca5104f38555afccda1aa11a92ee14cb21d/pandas-2.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:503cf027cf9940d2ceaa1a93cfb5f8c8c7e6e90720a2850378f0b3f3b1e06826", size = 11346086, upload-time = "2025-09-29T23:18:18.505Z" }, + { url = "https://files.pythonhosted.org/packages/c1/fa/7ac648108144a095b4fb6aa3de1954689f7af60a14cf25583f4960ecb878/pandas-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:602b8615ebcc4a0c1751e71840428ddebeb142ec02c786e8ad6b1ce3c8dec523", size = 11578790, upload-time = "2025-09-29T23:18:30.065Z" }, + { url = "https://files.pythonhosted.org/packages/9b/35/74442388c6cf008882d4d4bdfc4109be87e9b8b7ccd097ad1e7f006e2e95/pandas-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8fe25fc7b623b0ef6b5009149627e34d2a4657e880948ec3c840e9402e5c1b45", size = 10833831, upload-time = "2025-09-29T23:38:56.071Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e4/de154cbfeee13383ad58d23017da99390b91d73f8c11856f2095e813201b/pandas-2.3.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b468d3dad6ff947df92dcb32ede5b7bd41a9b3cceef0a30ed925f6d01fb8fa66", size = 12199267, upload-time = "2025-09-29T23:18:41.627Z" }, + { url = "https://files.pythonhosted.org/packages/bf/c9/63f8d545568d9ab91476b1818b4741f521646cbdd151c6efebf40d6de6f7/pandas-2.3.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b98560e98cb334799c0b07ca7967ac361a47326e9b4e5a7dfb5ab2b1c9d35a1b", size = 12789281, upload-time = "2025-09-29T23:18:56.834Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/00/a5ac8c7a0e67fd1a6059e40aa08fa1c52cc00709077d2300e210c3ce0322/pandas-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37b5848ba49824e5c30bedb9c830ab9b7751fd049bc7914533e01c65f79791", size = 13240453, upload-time = "2025-09-29T23:19:09.247Z" }, + { url = "https://files.pythonhosted.org/packages/27/4d/5c23a5bc7bd209231618dd9e606ce076272c9bc4f12023a70e03a86b4067/pandas-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db4301b2d1f926ae677a751eb2bd0e8c5f5319c9cb3f88b0becbbb0b07b34151", size = 13890361, upload-time = "2025-09-29T23:19:25.342Z" }, + { url = "https://files.pythonhosted.org/packages/8e/59/712db1d7040520de7a4965df15b774348980e6df45c129b8c64d0dbe74ef/pandas-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:f086f6fe114e19d92014a1966f43a3e62285109afe874f067f5abbdcbb10e59c", size = 11348702, upload-time = "2025-09-29T23:19:38.296Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846, upload-time = "2025-09-29T23:19:48.856Z" }, + { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618, upload-time = "2025-09-29T23:39:08.659Z" }, + { url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212, upload-time = "2025-09-29T23:19:59.765Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693, upload-time = "2025-09-29T23:20:14.098Z" }, + { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002, upload-time = "2025-09-29T23:20:26.76Z" }, + { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971, upload-time = "2025-09-29T23:20:41.344Z" }, + { url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722, upload-time = "2025-09-29T23:20:54.139Z" }, +] + +[[package]] +name = "pandas" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32'", +] +dependencies = [ + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130')" }, + { name = "python-dateutil", marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130')" }, + { name = "tzdata", marker = "(python_full_version >= '3.11' and sys_platform == 'emscripten') or (python_full_version >= '3.11' and sys_platform == 'win32') or (sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'emscripten' and sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'emscripten' and sys_platform != 'win32' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'emscripten' and extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/da/99/b342345300f13440fe9fe385c3c481e2d9a595ee3bab4d3219247ac94e9a/pandas-3.0.2.tar.gz", hash = "sha256:f4753e73e34c8d83221ba58f232433fca2748be8b18dbca02d242ed153945043", size = 4645855, upload-time = "2026-03-31T06:48:30.816Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/97/35/6411db530c618e0e0005187e35aa02ce60ae4c4c4d206964a2f978217c27/pandas-3.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a727a73cbdba2f7458dc82449e2315899d5140b449015d822f515749a46cbbe0", size = 10326926, upload-time = "2026-03-31T06:46:08.29Z" }, + { url = "https://files.pythonhosted.org/packages/c4/d3/b7da1d5d7dbdc5ef52ed7debd2b484313b832982266905315dad5a0bf0b1/pandas-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dbbd4aa20ca51e63b53bbde6a0fa4254b1aaabb74d2f542df7a7959feb1d760c", size = 9926987, upload-time = "2026-03-31T06:46:11.724Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/77/9b1c2d6070b5dbe239a7bc889e21bfa58720793fb902d1e070695d87c6d0/pandas-3.0.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:339dda302bd8369dedeae979cb750e484d549b563c3f54f3922cb8ff4978c5eb", size = 10757067, upload-time = "2026-03-31T06:46:14.903Z" }, + { url = "https://files.pythonhosted.org/packages/20/17/ec40d981705654853726e7ac9aea9ddbb4a5d9cf54d8472222f4f3de06c2/pandas-3.0.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:61c2fd96d72b983a9891b2598f286befd4ad262161a609c92dc1652544b46b76", size = 11258787, upload-time = "2026-03-31T06:46:17.683Z" }, + { url = "https://files.pythonhosted.org/packages/90/e3/3f1126d43d3702ca8773871a81c9f15122a1f412342cc56284ffda5b1f70/pandas-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c934008c733b8bbea273ea308b73b3156f0181e5b72960790b09c18a2794fe1e", size = 11771616, upload-time = "2026-03-31T06:46:20.532Z" }, + { url = "https://files.pythonhosted.org/packages/2e/cf/0f4e268e1f5062e44a6bda9f925806721cd4c95c2b808a4c82ebe914f96b/pandas-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:60a80bb4feacbef5e1447a3f82c33209c8b7e07f28d805cfd1fb951e5cb443aa", size = 12337623, upload-time = "2026-03-31T06:46:23.754Z" }, + { url = "https://files.pythonhosted.org/packages/44/a0/97a6339859d4acb2536efb24feb6708e82f7d33b2ed7e036f2983fcced82/pandas-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:ed72cb3f45190874eb579c64fa92d9df74e98fd63e2be7f62bce5ace0ade61df", size = 9897372, upload-time = "2026-03-31T06:46:26.703Z" }, + { url = "https://files.pythonhosted.org/packages/8f/eb/781516b808a99ddf288143cec46b342b3016c3414d137da1fdc3290d8860/pandas-3.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:f12b1a9e332c01e09510586f8ca9b108fd631fd656af82e452d7315ef6df5f9f", size = 9154922, upload-time = "2026-03-31T06:46:30.284Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/b0/c20bd4d6d3f736e6bd6b55794e9cd0a617b858eaad27c8f410ea05d953b7/pandas-3.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:232a70ebb568c0c4d2db4584f338c1577d81e3af63292208d615907b698a0f18", size = 10347921, upload-time = "2026-03-31T06:46:33.36Z" }, + { url = "https://files.pythonhosted.org/packages/35/d0/4831af68ce30cc2d03c697bea8450e3225a835ef497d0d70f31b8cdde965/pandas-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:970762605cff1ca0d3f71ed4f3a769ea8f85fc8e6348f6e110b8fea7e6eb5a14", size = 9888127, upload-time = "2026-03-31T06:46:36.253Z" }, + { url = "https://files.pythonhosted.org/packages/61/a9/16ea9346e1fc4a96e2896242d9bc674764fb9049b0044c0132502f7a771e/pandas-3.0.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aff4e6f4d722e0652707d7bcb190c445fe58428500c6d16005b02401764b1b3d", size = 10399577, upload-time = "2026-03-31T06:46:39.224Z" }, + { url = "https://files.pythonhosted.org/packages/c4/a8/3a61a721472959ab0ce865ef05d10b0d6bfe27ce8801c99f33d4fa996e65/pandas-3.0.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ef8b27695c3d3dc78403c9a7d5e59a62d5464a7e1123b4e0042763f7104dc74f", size = 10880030, upload-time = "2026-03-31T06:46:42.412Z" }, + { url = "https://files.pythonhosted.org/packages/da/65/7225c0ea4d6ce9cb2160a7fb7f39804871049f016e74782e5dade4d14109/pandas-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f8d68083e49e16b84734eb1a4dcae4259a75c90fb6e2251ab9a00b61120c06ab", size = 11409468, upload-time = "2026-03-31T06:46:45.2Z" }, + { url = "https://files.pythonhosted.org/packages/fa/5b/46e7c76032639f2132359b5cf4c785dd8cf9aea5ea64699eac752f02b9db/pandas-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:32cc41f310ebd4a296d93515fcac312216adfedb1894e879303987b8f1e2b97d", size = 11936381, upload-time = "2026-03-31T06:46:48.293Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/8b/721a9cff6fa6a91b162eb51019c6243b82b3226c71bb6c8ef4a9bd65cbc6/pandas-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:a4785e1d6547d8427c5208b748ae2efb64659a21bd82bf440d4262d02bfa02a4", size = 9744993, upload-time = "2026-03-31T06:46:51.488Z" }, + { url = "https://files.pythonhosted.org/packages/d5/18/7f0bd34ae27b28159aa80f2a6799f47fda34f7fb938a76e20c7b7fe3b200/pandas-3.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:08504503f7101300107ecdc8df73658e4347586db5cfdadabc1592e9d7e7a0fd", size = 9056118, upload-time = "2026-03-31T06:46:54.548Z" }, +] + [[package]] name = "pandocfilters" version = "1.5.1" @@ -2852,20 +10715,20 @@ wheels = [ [[package]] name = "parso" -version = "0.8.5" +version = "0.8.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205, upload-time = "2025-08-23T15:15:28.028Z" } +sdist = { url = "https://files.pythonhosted.org/packages/81/76/a1e769043c0c0c9fe391b702539d594731a4362334cdf4dc25d0c09761e7/parso-0.8.6.tar.gz", hash = "sha256:2b9a0332696df97d454fa67b81618fd69c35a7b90327cbe6ba5c92d2c68a7bfd", size = 401621, upload-time = "2026-02-09T15:45:24.425Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z" }, + { url = "https://files.pythonhosted.org/packages/b6/61/fae042894f4296ec49e3f193aff5d7c18440da9e48102c3315e1bc4519a7/parso-0.8.6-py2.py3-none-any.whl", hash = "sha256:2c549f800b70a5c4952197248825584cb00f033b29c692671d3bf08bf380baff", size = 106894, upload-time = "2026-02-09T15:45:21.391Z" }, ] [[package]] name = 
"pathspec" -version = "1.0.3" +version = "1.0.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4c/b2/bb8e495d5262bfec41ab5cb18f522f1012933347fb5d9e62452d446baca2/pathspec-1.0.3.tar.gz", hash = "sha256:bac5cf97ae2c2876e2d25ebb15078eb04d76e4b98921ee31c6f85ade8b59444d", size = 130841, upload-time = "2026-01-09T15:46:46.009Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200, upload-time = "2026-01-27T03:59:46.938Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/2b/121e912bd60eebd623f873fd090de0e84f322972ab25a7f9044c056804ed/pathspec-1.0.3-py3-none-any.whl", hash = "sha256:e80767021c1cc524aa3fb14bedda9c34406591343cc42797b386ce7b9354fb6c", size = 55021, upload-time = "2026-01-09T15:46:44.652Z" }, + { url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" }, ] [[package]] @@ -2875,18 +10738,20 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "accelerate" }, { name = "huggingface-hub" }, - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or 
(python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= 
'3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra 
== 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or 
(extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "packaging" }, { name = "psutil" }, { name = "pyyaml" }, { name = "safetensors" }, - { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or 
(extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') 
or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
== 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 
'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "tqdm" }, { name = "transformers" }, ] @@ -2900,7 +10765,7 @@ name = "pexpect" version = "4.9.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "ptyprocess" }, + { name = "ptyprocess", marker = "(python_full_version < '3.11' and sys_platform == 'emscripten') or (python_full_version < '3.11' and sys_platform == 'win32') or (sys_platform != 'emscripten' and sys_platform != 'win32') or (sys_platform == 'emscripten' and extra != 'extra-16-inference-models-falcon-perception') or (sys_platform == 'win32' and extra != 'extra-16-inference-models-falcon-perception') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'win32' 
and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" } wheels = [ @@ -2909,75 +10774,59 @@ wheels = [ [[package]] name = "pillow" -version = "11.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/cb/bb5c01fcd2a69335b86c22142b2bccfc3464087efb7fd382eee5ffc7fdf7/pillow-11.2.1.tar.gz", hash = "sha256:a64dd61998416367b7ef979b73d3a85853ba9bec4c2925f74e588879a58716b6", size = 47026707, upload-time = "2025-04-12T17:50:03.289Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/8b/b158ad57ed44d3cc54db8d68ad7c0a58b8fc0e4c7a3f995f9d62d5b464a1/pillow-11.2.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:d57a75d53922fc20c165016a20d9c44f73305e67c351bbc60d1adaf662e74047", size = 3198442, upload-time = "2025-04-12T17:47:10.666Z" }, - { url = "https://files.pythonhosted.org/packages/b1/f8/bb5d956142f86c2d6cc36704943fa761f2d2e4c48b7436fd0a85c20f1713/pillow-11.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:127bf6ac4a5b58b3d32fc8289656f77f80567d65660bc46f72c0d77e6600cc95", size = 3030553, upload-time = "2025-04-12T17:47:13.153Z" }, - { url = "https://files.pythonhosted.org/packages/22/7f/0e413bb3e2aa797b9ca2c5c38cb2e2e45d88654e5b12da91ad446964cfae/pillow-11.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4ba4be812c7a40280629e55ae0b14a0aafa150dd6451297562e1764808bbe61", size = 4405503, upload-time = "2025-04-12T17:47:15.36Z" }, - { url = "https://files.pythonhosted.org/packages/f3/b4/cc647f4d13f3eb837d3065824aa58b9bcf10821f029dc79955ee43f793bd/pillow-11.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c8bd62331e5032bc396a93609982a9ab6b411c05078a52f5fe3cc59234a3abd1", size = 4490648, upload-time = "2025-04-12T17:47:17.37Z" }, - { url = "https://files.pythonhosted.org/packages/c2/6f/240b772a3b35cdd7384166461567aa6713799b4e78d180c555bd284844ea/pillow-11.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:562d11134c97a62fe3af29581f083033179f7ff435f78392565a1ad2d1c2c45c", size = 4508937, upload-time = "2025-04-12T17:47:19.066Z" }, - { url = "https://files.pythonhosted.org/packages/f3/5e/7ca9c815ade5fdca18853db86d812f2f188212792780208bdb37a0a6aef4/pillow-11.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c97209e85b5be259994eb5b69ff50c5d20cca0f458ef9abd835e262d9d88b39d", size = 4599802, upload-time = "2025-04-12T17:47:21.404Z" }, - { url = "https://files.pythonhosted.org/packages/02/81/c3d9d38ce0c4878a77245d4cf2c46d45a4ad0f93000227910a46caff52f3/pillow-11.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0c3e6d0f59171dfa2e25d7116217543310908dfa2770aa64b8f87605f8cacc97", size = 4576717, upload-time = "2025-04-12T17:47:23.571Z" }, - { url = "https://files.pythonhosted.org/packages/42/49/52b719b89ac7da3185b8d29c94d0e6aec8140059e3d8adcaa46da3751180/pillow-11.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc1c3bc53befb6096b84165956e886b1729634a799e9d6329a0c512ab651e579", size = 4654874, upload-time = "2025-04-12T17:47:25.783Z" }, - { url = "https://files.pythonhosted.org/packages/5b/0b/ede75063ba6023798267023dc0d0401f13695d228194d2242d5a7ba2f964/pillow-11.2.1-cp310-cp310-win32.whl", hash = "sha256:312c77b7f07ab2139924d2639860e084ec2a13e72af54d4f08ac843a5fc9c79d", size = 2331717, upload-time = "2025-04-12T17:47:28.922Z" }, - { url = "https://files.pythonhosted.org/packages/ed/3c/9831da3edea527c2ed9a09f31a2c04e77cd705847f13b69ca60269eec370/pillow-11.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9bc7ae48b8057a611e5fe9f853baa88093b9a76303937449397899385da06fad", size = 2676204, upload-time = "2025-04-12T17:47:31.283Z" }, - { url = 
"https://files.pythonhosted.org/packages/01/97/1f66ff8a1503d8cbfc5bae4dc99d54c6ec1e22ad2b946241365320caabc2/pillow-11.2.1-cp310-cp310-win_arm64.whl", hash = "sha256:2728567e249cdd939f6cc3d1f049595c66e4187f3c34078cbc0a7d21c47482d2", size = 2414767, upload-time = "2025-04-12T17:47:34.655Z" }, - { url = "https://files.pythonhosted.org/packages/68/08/3fbf4b98924c73037a8e8b4c2c774784805e0fb4ebca6c5bb60795c40125/pillow-11.2.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35ca289f712ccfc699508c4658a1d14652e8033e9b69839edf83cbdd0ba39e70", size = 3198450, upload-time = "2025-04-12T17:47:37.135Z" }, - { url = "https://files.pythonhosted.org/packages/84/92/6505b1af3d2849d5e714fc75ba9e69b7255c05ee42383a35a4d58f576b16/pillow-11.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0409af9f829f87a2dfb7e259f78f317a5351f2045158be321fd135973fff7bf", size = 3030550, upload-time = "2025-04-12T17:47:39.345Z" }, - { url = "https://files.pythonhosted.org/packages/3c/8c/ac2f99d2a70ff966bc7eb13dacacfaab57c0549b2ffb351b6537c7840b12/pillow-11.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4e5c5edee874dce4f653dbe59db7c73a600119fbea8d31f53423586ee2aafd7", size = 4415018, upload-time = "2025-04-12T17:47:41.128Z" }, - { url = "https://files.pythonhosted.org/packages/1f/e3/0a58b5d838687f40891fff9cbaf8669f90c96b64dc8f91f87894413856c6/pillow-11.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b93a07e76d13bff9444f1a029e0af2964e654bfc2e2c2d46bfd080df5ad5f3d8", size = 4498006, upload-time = "2025-04-12T17:47:42.912Z" }, - { url = "https://files.pythonhosted.org/packages/21/f5/6ba14718135f08fbfa33308efe027dd02b781d3f1d5c471444a395933aac/pillow-11.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:e6def7eed9e7fa90fde255afaf08060dc4b343bbe524a8f69bdd2a2f0018f600", size = 4517773, upload-time = "2025-04-12T17:47:44.611Z" }, - { url = 
"https://files.pythonhosted.org/packages/20/f2/805ad600fc59ebe4f1ba6129cd3a75fb0da126975c8579b8f57abeb61e80/pillow-11.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8f4f3724c068be008c08257207210c138d5f3731af6c155a81c2b09a9eb3a788", size = 4607069, upload-time = "2025-04-12T17:47:46.46Z" }, - { url = "https://files.pythonhosted.org/packages/71/6b/4ef8a288b4bb2e0180cba13ca0a519fa27aa982875882392b65131401099/pillow-11.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a0a6709b47019dff32e678bc12c63008311b82b9327613f534e496dacaefb71e", size = 4583460, upload-time = "2025-04-12T17:47:49.255Z" }, - { url = "https://files.pythonhosted.org/packages/62/ae/f29c705a09cbc9e2a456590816e5c234382ae5d32584f451c3eb41a62062/pillow-11.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f6b0c664ccb879109ee3ca702a9272d877f4fcd21e5eb63c26422fd6e415365e", size = 4661304, upload-time = "2025-04-12T17:47:51.067Z" }, - { url = "https://files.pythonhosted.org/packages/6e/1a/c8217b6f2f73794a5e219fbad087701f412337ae6dbb956db37d69a9bc43/pillow-11.2.1-cp311-cp311-win32.whl", hash = "sha256:cc5d875d56e49f112b6def6813c4e3d3036d269c008bf8aef72cd08d20ca6df6", size = 2331809, upload-time = "2025-04-12T17:47:54.425Z" }, - { url = "https://files.pythonhosted.org/packages/e2/72/25a8f40170dc262e86e90f37cb72cb3de5e307f75bf4b02535a61afcd519/pillow-11.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:0f5c7eda47bf8e3c8a283762cab94e496ba977a420868cb819159980b6709193", size = 2676338, upload-time = "2025-04-12T17:47:56.535Z" }, - { url = "https://files.pythonhosted.org/packages/06/9e/76825e39efee61efea258b479391ca77d64dbd9e5804e4ad0fa453b4ba55/pillow-11.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:4d375eb838755f2528ac8cbc926c3e31cc49ca4ad0cf79cff48b20e30634a4a7", size = 2414918, upload-time = "2025-04-12T17:47:58.217Z" }, - { url = 
"https://files.pythonhosted.org/packages/c7/40/052610b15a1b8961f52537cc8326ca6a881408bc2bdad0d852edeb6ed33b/pillow-11.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:78afba22027b4accef10dbd5eed84425930ba41b3ea0a86fa8d20baaf19d807f", size = 3190185, upload-time = "2025-04-12T17:48:00.417Z" }, - { url = "https://files.pythonhosted.org/packages/e5/7e/b86dbd35a5f938632093dc40d1682874c33dcfe832558fc80ca56bfcb774/pillow-11.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78092232a4ab376a35d68c4e6d5e00dfd73454bd12b230420025fbe178ee3b0b", size = 3030306, upload-time = "2025-04-12T17:48:02.391Z" }, - { url = "https://files.pythonhosted.org/packages/a4/5c/467a161f9ed53e5eab51a42923c33051bf8d1a2af4626ac04f5166e58e0c/pillow-11.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25a5f306095c6780c52e6bbb6109624b95c5b18e40aab1c3041da3e9e0cd3e2d", size = 4416121, upload-time = "2025-04-12T17:48:04.554Z" }, - { url = "https://files.pythonhosted.org/packages/62/73/972b7742e38ae0e2ac76ab137ca6005dcf877480da0d9d61d93b613065b4/pillow-11.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c7b29dbd4281923a2bfe562acb734cee96bbb129e96e6972d315ed9f232bef4", size = 4501707, upload-time = "2025-04-12T17:48:06.831Z" }, - { url = "https://files.pythonhosted.org/packages/e4/3a/427e4cb0b9e177efbc1a84798ed20498c4f233abde003c06d2650a6d60cb/pillow-11.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e645b020f3209a0181a418bffe7b4a93171eef6c4ef6cc20980b30bebf17b7d", size = 4522921, upload-time = "2025-04-12T17:48:09.229Z" }, - { url = "https://files.pythonhosted.org/packages/fe/7c/d8b1330458e4d2f3f45d9508796d7caf0c0d3764c00c823d10f6f1a3b76d/pillow-11.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b2dbea1012ccb784a65349f57bbc93730b96e85b42e9bf7b01ef40443db720b4", size = 4612523, upload-time = "2025-04-12T17:48:11.631Z" }, - { url = 
"https://files.pythonhosted.org/packages/b3/2f/65738384e0b1acf451de5a573d8153fe84103772d139e1e0bdf1596be2ea/pillow-11.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:da3104c57bbd72948d75f6a9389e6727d2ab6333c3617f0a89d72d4940aa0443", size = 4587836, upload-time = "2025-04-12T17:48:13.592Z" }, - { url = "https://files.pythonhosted.org/packages/6a/c5/e795c9f2ddf3debb2dedd0df889f2fe4b053308bb59a3cc02a0cd144d641/pillow-11.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:598174aef4589af795f66f9caab87ba4ff860ce08cd5bb447c6fc553ffee603c", size = 4669390, upload-time = "2025-04-12T17:48:15.938Z" }, - { url = "https://files.pythonhosted.org/packages/96/ae/ca0099a3995976a9fce2f423166f7bff9b12244afdc7520f6ed38911539a/pillow-11.2.1-cp312-cp312-win32.whl", hash = "sha256:1d535df14716e7f8776b9e7fee118576d65572b4aad3ed639be9e4fa88a1cad3", size = 2332309, upload-time = "2025-04-12T17:48:17.885Z" }, - { url = "https://files.pythonhosted.org/packages/7c/18/24bff2ad716257fc03da964c5e8f05d9790a779a8895d6566e493ccf0189/pillow-11.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:14e33b28bf17c7a38eede290f77db7c664e4eb01f7869e37fa98a5aa95978941", size = 2676768, upload-time = "2025-04-12T17:48:19.655Z" }, - { url = "https://files.pythonhosted.org/packages/da/bb/e8d656c9543276517ee40184aaa39dcb41e683bca121022f9323ae11b39d/pillow-11.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:21e1470ac9e5739ff880c211fc3af01e3ae505859392bf65458c224d0bf283eb", size = 2415087, upload-time = "2025-04-12T17:48:21.991Z" }, - { url = "https://files.pythonhosted.org/packages/33/49/c8c21e4255b4f4a2c0c68ac18125d7f5460b109acc6dfdef1a24f9b960ef/pillow-11.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9b7b0d4fd2635f54ad82785d56bc0d94f147096493a79985d0ab57aedd563156", size = 3181727, upload-time = "2025-04-12T17:49:31.898Z" }, - { url = 
"https://files.pythonhosted.org/packages/6d/f1/f7255c0838f8c1ef6d55b625cfb286835c17e8136ce4351c5577d02c443b/pillow-11.2.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:aa442755e31c64037aa7c1cb186e0b369f8416c567381852c63444dd666fb772", size = 2999833, upload-time = "2025-04-12T17:49:34.2Z" }, - { url = "https://files.pythonhosted.org/packages/e2/57/9968114457bd131063da98d87790d080366218f64fa2943b65ac6739abb3/pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0d3348c95b766f54b76116d53d4cb171b52992a1027e7ca50c81b43b9d9e363", size = 3437472, upload-time = "2025-04-12T17:49:36.294Z" }, - { url = "https://files.pythonhosted.org/packages/b2/1b/e35d8a158e21372ecc48aac9c453518cfe23907bb82f950d6e1c72811eb0/pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85d27ea4c889342f7e35f6d56e7e1cb345632ad592e8c51b693d7b7556043ce0", size = 3459976, upload-time = "2025-04-12T17:49:38.988Z" }, - { url = "https://files.pythonhosted.org/packages/26/da/2c11d03b765efff0ccc473f1c4186dc2770110464f2177efaed9cf6fae01/pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bf2c33d6791c598142f00c9c4c7d47f6476731c31081331664eb26d6ab583e01", size = 3527133, upload-time = "2025-04-12T17:49:40.985Z" }, - { url = "https://files.pythonhosted.org/packages/79/1a/4e85bd7cadf78412c2a3069249a09c32ef3323650fd3005c97cca7aa21df/pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e616e7154c37669fc1dfc14584f11e284e05d1c650e1c0f972f281c4ccc53193", size = 3571555, upload-time = "2025-04-12T17:49:42.964Z" }, - { url = "https://files.pythonhosted.org/packages/69/03/239939915216de1e95e0ce2334bf17a7870ae185eb390fab6d706aadbfc0/pillow-11.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:39ad2e0f424394e3aebc40168845fee52df1394a4673a6ee512d840d14ab3013", size = 2674713, upload-time = "2025-04-12T17:49:44.944Z" }, - { url = 
"https://files.pythonhosted.org/packages/a4/ad/2613c04633c7257d9481ab21d6b5364b59fc5d75faafd7cb8693523945a3/pillow-11.2.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:80f1df8dbe9572b4b7abdfa17eb5d78dd620b1d55d9e25f834efdbee872d3aed", size = 3181734, upload-time = "2025-04-12T17:49:46.789Z" }, - { url = "https://files.pythonhosted.org/packages/a4/fd/dcdda4471ed667de57bb5405bb42d751e6cfdd4011a12c248b455c778e03/pillow-11.2.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ea926cfbc3957090becbcbbb65ad177161a2ff2ad578b5a6ec9bb1e1cd78753c", size = 2999841, upload-time = "2025-04-12T17:49:48.812Z" }, - { url = "https://files.pythonhosted.org/packages/ac/89/8a2536e95e77432833f0db6fd72a8d310c8e4272a04461fb833eb021bf94/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:738db0e0941ca0376804d4de6a782c005245264edaa253ffce24e5a15cbdc7bd", size = 3437470, upload-time = "2025-04-12T17:49:50.831Z" }, - { url = "https://files.pythonhosted.org/packages/9d/8f/abd47b73c60712f88e9eda32baced7bfc3e9bd6a7619bb64b93acff28c3e/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db98ab6565c69082ec9b0d4e40dd9f6181dab0dd236d26f7a50b8b9bfbd5076", size = 3460013, upload-time = "2025-04-12T17:49:53.278Z" }, - { url = "https://files.pythonhosted.org/packages/f6/20/5c0a0aa83b213b7a07ec01e71a3d6ea2cf4ad1d2c686cc0168173b6089e7/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:036e53f4170e270ddb8797d4c590e6dd14d28e15c7da375c18978045f7e6c37b", size = 3527165, upload-time = "2025-04-12T17:49:55.164Z" }, - { url = "https://files.pythonhosted.org/packages/58/0e/2abab98a72202d91146abc839e10c14f7cf36166f12838ea0c4db3ca6ecb/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:14f73f7c291279bd65fda51ee87affd7c1e097709f7fdd0188957a16c264601f", size = 3571586, upload-time = "2025-04-12T17:49:57.171Z" }, - { url = 
"https://files.pythonhosted.org/packages/21/2c/5e05f58658cf49b6667762cca03d6e7d85cededde2caf2ab37b81f80e574/pillow-11.2.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:208653868d5c9ecc2b327f9b9ef34e0e42a4cdd172c2988fd81d62d2bc9bc044", size = 2674751, upload-time = "2025-04-12T17:49:59.628Z" }, -] - -[[package]] -name = "pkgconfig" -version = "1.6.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/52/fd/0adde075cd3bfecd557bc7d757e00e231d34d8a6edb4c8d1642759254c21/pkgconfig-1.6.0.tar.gz", hash = "sha256:4a5a6631ce937fafac457104a40d558785a658bbdca5c49b6295bc3fd651907f", size = 5691, upload-time = "2026-03-06T11:26:01.194Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/75/6f/f7ec07fba48f07c555cc4099481df644fbbc12067879072c17ac229f6556/pkgconfig-1.6.0-py3-none-any.whl", hash = "sha256:98e71754855e9563838d952a160eb577edabb57782e49853edb5381927e6bea1", size = 7086, upload-time = "2026-03-06T11:26:07.688Z" }, +version = "12.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/21/c2bcdd5906101a30244eaffc1b6e6ce71a31bd0742a01eb89e660ebfac2d/pillow-12.2.0.tar.gz", hash = "sha256:a830b1a40919539d07806aa58e1b114df53ddd43213d9c8b75847eee6c0182b5", size = 46987819, upload-time = "2026-04-01T14:46:17.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/aa/d0b28e1c811cd4d5f5c2bfe2e022292bd255ae5744a3b9ac7d6c8f72dd75/pillow-12.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:a4e8f36e677d3336f35089648c8955c51c6d386a13cf6ee9c189c5f5bd713a9f", size = 5354355, upload-time = "2026-04-01T14:42:15.402Z" }, + { url = "https://files.pythonhosted.org/packages/27/8e/1d5b39b8ae2bd7650d0c7b6abb9602d16043ead9ebbfef4bc4047454da2a/pillow-12.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e589959f10d9824d39b350472b92f0ce3b443c0a3442ebf41c40cb8361c5b97", size = 4695871, upload-time = "2026-04-01T14:42:18.234Z" }, + { 
url = "https://files.pythonhosted.org/packages/f0/c5/dcb7a6ca6b7d3be41a76958e90018d56c8462166b3ef223150360850c8da/pillow-12.2.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a52edc8bfff4429aaabdf4d9ee0daadbbf8562364f940937b941f87a4290f5ff", size = 6269734, upload-time = "2026-04-01T14:42:20.608Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f1/aa1bb13b2f4eba914e9637893c73f2af8e48d7d4023b9d3750d4c5eb2d0c/pillow-12.2.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:975385f4776fafde056abb318f612ef6285b10a1f12b8570f3647ad0d74b48ec", size = 8076080, upload-time = "2026-04-01T14:42:23.095Z" }, + { url = "https://files.pythonhosted.org/packages/a1/2a/8c79d6a53169937784604a8ae8d77e45888c41537f7f6f65ed1f407fe66d/pillow-12.2.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bd9c0c7a0c681a347b3194c500cb1e6ca9cab053ea4d82a5cf45b6b754560136", size = 6382236, upload-time = "2026-04-01T14:42:25.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/42/bbcb6051030e1e421d103ce7a8ecadf837aa2f39b8f82ef1a8d37c3d4ebc/pillow-12.2.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:88d387ff40b3ff7c274947ed3125dedf5262ec6919d83946753b5f3d7c67ea4c", size = 7070220, upload-time = "2026-04-01T14:42:28.68Z" }, + { url = "https://files.pythonhosted.org/packages/3f/e1/c2a7d6dd8cfa6b231227da096fd2d58754bab3603b9d73bf609d3c18b64f/pillow-12.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:51c4167c34b0d8ba05b547a3bb23578d0ba17b80a5593f93bd8ecb123dd336a3", size = 6493124, upload-time = "2026-04-01T14:42:31.579Z" }, + { url = "https://files.pythonhosted.org/packages/5f/41/7c8617da5d32e1d2f026e509484fdb6f3ad7efaef1749a0c1928adbb099e/pillow-12.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:34c0d99ecccea270c04882cb3b86e7b57296079c9a4aff88cb3b33563d95afaa", size = 7194324, upload-time = "2026-04-01T14:42:34.615Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/de/a777627e19fd6d62f84070ee1521adde5eeda4855b5cf60fe0b149118bca/pillow-12.2.0-cp310-cp310-win32.whl", hash = "sha256:b85f66ae9eb53e860a873b858b789217ba505e5e405a24b85c0464822fe88032", size = 6376363, upload-time = "2026-04-01T14:42:37.19Z" }, + { url = "https://files.pythonhosted.org/packages/e7/34/fc4cb5204896465842767b96d250c08410f01f2f28afc43b257de842eed5/pillow-12.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:673aa32138f3e7531ccdbca7b3901dba9b70940a19ccecc6a37c77d5fdeb05b5", size = 7083523, upload-time = "2026-04-01T14:42:39.62Z" }, + { url = "https://files.pythonhosted.org/packages/2d/a0/32852d36bc7709f14dc3f64f929a275e958ad8c19a6deba9610d458e28b3/pillow-12.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:3e080565d8d7c671db5802eedfb438e5565ffa40115216eabb8cd52d0ecce024", size = 2463318, upload-time = "2026-04-01T14:42:42.063Z" }, + { url = "https://files.pythonhosted.org/packages/68/e1/748f5663efe6edcfc4e74b2b93edfb9b8b99b67f21a854c3ae416500a2d9/pillow-12.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:8be29e59487a79f173507c30ddf57e733a357f67881430449bb32614075a40ab", size = 5354347, upload-time = "2026-04-01T14:42:44.255Z" }, + { url = "https://files.pythonhosted.org/packages/47/a1/d5ff69e747374c33a3b53b9f98cca7889fce1fd03d79cdc4e1bccc6c5a87/pillow-12.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:71cde9a1e1551df7d34a25462fc60325e8a11a82cc2e2f54578e5e9a1e153d65", size = 4695873, upload-time = "2026-04-01T14:42:46.452Z" }, + { url = "https://files.pythonhosted.org/packages/df/21/e3fbdf54408a973c7f7f89a23b2cb97a7ef30c61ab4142af31eee6aebc88/pillow-12.2.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f490f9368b6fc026f021db16d7ec2fbf7d89e2edb42e8ec09d2c60505f5729c7", size = 6280168, upload-time = "2026-04-01T14:42:49.228Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/f1/00b7278c7dd52b17ad4329153748f87b6756ec195ff786c2bdf12518337d/pillow-12.2.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8bd7903a5f2a4545f6fd5935c90058b89d30045568985a71c79f5fd6edf9b91e", size = 8088188, upload-time = "2026-04-01T14:42:51.735Z" }, + { url = "https://files.pythonhosted.org/packages/ad/cf/220a5994ef1b10e70e85748b75649d77d506499352be135a4989c957b701/pillow-12.2.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3997232e10d2920a68d25191392e3a4487d8183039e1c74c2297f00ed1c50705", size = 6394401, upload-time = "2026-04-01T14:42:54.343Z" }, + { url = "https://files.pythonhosted.org/packages/e9/bd/e51a61b1054f09437acfbc2ff9106c30d1eb76bc1453d428399946781253/pillow-12.2.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e74473c875d78b8e9d5da2a70f7099549f9eb37ded4e2f6a463e60125bccd176", size = 7079655, upload-time = "2026-04-01T14:42:56.954Z" }, + { url = "https://files.pythonhosted.org/packages/6b/3d/45132c57d5fb4b5744567c3817026480ac7fc3ce5d4c47902bc0e7f6f853/pillow-12.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:56a3f9c60a13133a98ecff6197af34d7824de9b7b38c3654861a725c970c197b", size = 6503105, upload-time = "2026-04-01T14:42:59.847Z" }, + { url = "https://files.pythonhosted.org/packages/7d/2e/9df2fc1e82097b1df3dce58dc43286aa01068e918c07574711fcc53e6fb4/pillow-12.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:90e6f81de50ad6b534cab6e5aef77ff6e37722b2f5d908686f4a5c9eba17a909", size = 7203402, upload-time = "2026-04-01T14:43:02.664Z" }, + { url = "https://files.pythonhosted.org/packages/bd/2e/2941e42858ebb67e50ae741473de81c2984e6eff7b397017623c676e2e8d/pillow-12.2.0-cp311-cp311-win32.whl", hash = "sha256:8c984051042858021a54926eb597d6ee3012393ce9c181814115df4c60b9a808", size = 6378149, upload-time = "2026-04-01T14:43:05.274Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/42/836b6f3cd7f3e5fa10a1f1a5420447c17966044c8fbf589cc0452d5502db/pillow-12.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6e6b2a0c538fc200b38ff9eb6628228b77908c319a005815f2dde585a0664b60", size = 7082626, upload-time = "2026-04-01T14:43:08.557Z" }, + { url = "https://files.pythonhosted.org/packages/c2/88/549194b5d6f1f494b485e493edc6693c0a16f4ada488e5bd974ed1f42fad/pillow-12.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:9a8a34cc89c67a65ea7437ce257cea81a9dad65b29805f3ecee8c8fe8ff25ffe", size = 2463531, upload-time = "2026-04-01T14:43:10.743Z" }, + { url = "https://files.pythonhosted.org/packages/58/be/7482c8a5ebebbc6470b3eb791812fff7d5e0216c2be3827b30b8bb6603ed/pillow-12.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2d192a155bbcec180f8564f693e6fd9bccff5a7af9b32e2e4bf8c9c69dbad6b5", size = 5308279, upload-time = "2026-04-01T14:43:13.246Z" }, + { url = "https://files.pythonhosted.org/packages/d8/95/0a351b9289c2b5cbde0bacd4a83ebc44023e835490a727b2a3bd60ddc0f4/pillow-12.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3f40b3c5a968281fd507d519e444c35f0ff171237f4fdde090dd60699458421", size = 4695490, upload-time = "2026-04-01T14:43:15.584Z" }, + { url = "https://files.pythonhosted.org/packages/de/af/4e8e6869cbed569d43c416fad3dc4ecb944cb5d9492defaed89ddd6fe871/pillow-12.2.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:03e7e372d5240cc23e9f07deca4d775c0817bffc641b01e9c3af208dbd300987", size = 6284462, upload-time = "2026-04-01T14:43:18.268Z" }, + { url = "https://files.pythonhosted.org/packages/e9/9e/c05e19657fd57841e476be1ab46c4d501bffbadbafdc31a6d665f8b737b6/pillow-12.2.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b86024e52a1b269467a802258c25521e6d742349d760728092e1bc2d135b4d76", size = 8094744, upload-time = "2026-04-01T14:43:20.716Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/54/1789c455ed10176066b6e7e6da1b01e50e36f94ba584dc68d9eebfe9156d/pillow-12.2.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7371b48c4fa448d20d2714c9a1f775a81155050d383333e0a6c15b1123dda005", size = 6398371, upload-time = "2026-04-01T14:43:23.443Z" }, + { url = "https://files.pythonhosted.org/packages/43/e3/fdc657359e919462369869f1c9f0e973f353f9a9ee295a39b1fea8ee1a77/pillow-12.2.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62f5409336adb0663b7caa0da5c7d9e7bdbaae9ce761d34669420c2a801b2780", size = 7087215, upload-time = "2026-04-01T14:43:26.758Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f8/2f6825e441d5b1959d2ca5adec984210f1ec086435b0ed5f52c19b3b8a6e/pillow-12.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:01afa7cf67f74f09523699b4e88c73fb55c13346d212a59a2db1f86b0a63e8c5", size = 6509783, upload-time = "2026-04-01T14:43:29.56Z" }, + { url = "https://files.pythonhosted.org/packages/67/f9/029a27095ad20f854f9dba026b3ea6428548316e057e6fc3545409e86651/pillow-12.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc3d34d4a8fbec3e88a79b92e5465e0f9b842b628675850d860b8bd300b159f5", size = 7212112, upload-time = "2026-04-01T14:43:32.091Z" }, + { url = "https://files.pythonhosted.org/packages/be/42/025cfe05d1be22dbfdb4f264fe9de1ccda83f66e4fc3aac94748e784af04/pillow-12.2.0-cp312-cp312-win32.whl", hash = "sha256:58f62cc0f00fd29e64b29f4fd923ffdb3859c9f9e6105bfc37ba1d08994e8940", size = 6378489, upload-time = "2026-04-01T14:43:34.601Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7b/25a221d2c761c6a8ae21bfa3874988ff2583e19cf8a27bf2fee358df7942/pillow-12.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:7f84204dee22a783350679a0333981df803dac21a0190d706a50475e361c93f5", size = 7084129, upload-time = "2026-04-01T14:43:37.213Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/e1/542a474affab20fd4a0f1836cb234e8493519da6b76899e30bcc5d990b8b/pillow-12.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:af73337013e0b3b46f175e79492d96845b16126ddf79c438d7ea7ff27783a414", size = 2463612, upload-time = "2026-04-01T14:43:39.421Z" }, + { url = "https://files.pythonhosted.org/packages/4e/b7/2437044fb910f499610356d1352e3423753c98e34f915252aafecc64889f/pillow-12.2.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0538bd5e05efec03ae613fd89c4ce0368ecd2ba239cc25b9f9be7ed426b0af1f", size = 5273969, upload-time = "2026-04-01T14:45:55.538Z" }, + { url = "https://files.pythonhosted.org/packages/f6/f4/8316e31de11b780f4ac08ef3654a75555e624a98db1056ecb2122d008d5a/pillow-12.2.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:394167b21da716608eac917c60aa9b969421b5dcbbe02ae7f013e7b85811c69d", size = 4659674, upload-time = "2026-04-01T14:45:58.093Z" }, + { url = "https://files.pythonhosted.org/packages/d4/37/664fca7201f8bb2aa1d20e2c3d5564a62e6ae5111741966c8319ca802361/pillow-12.2.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5d04bfa02cc2d23b497d1e90a0f927070043f6cbf303e738300532379a4b4e0f", size = 5288479, upload-time = "2026-04-01T14:46:01.141Z" }, + { url = "https://files.pythonhosted.org/packages/49/62/5b0ed78fce87346be7a5cfcfaaad91f6a1f98c26f86bdbafa2066c647ef6/pillow-12.2.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0c838a5125cee37e68edec915651521191cef1e6aa336b855f495766e77a366e", size = 7032230, upload-time = "2026-04-01T14:46:03.874Z" }, + { url = "https://files.pythonhosted.org/packages/c3/28/ec0fc38107fc32536908034e990c47914c57cd7c5a3ece4d8d8f7ffd7e27/pillow-12.2.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a6c9fa44005fa37a91ebfc95d081e8079757d2e904b27103f4f5fa6f0bf78c0", size = 5355404, upload-time = "2026-04-01T14:46:06.33Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/8b/51b0eddcfa2180d60e41f06bd6d0a62202b20b59c68f5a132e615b75aecf/pillow-12.2.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:25373b66e0dd5905ed63fa3cae13c82fbddf3079f2c8bf15c6fb6a35586324c1", size = 6002215, upload-time = "2026-04-01T14:46:08.83Z" }, + { url = "https://files.pythonhosted.org/packages/bc/60/5382c03e1970de634027cee8e1b7d39776b778b81812aaf45b694dfe9e28/pillow-12.2.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:bfa9c230d2fe991bed5318a5f119bd6780cda2915cca595393649fc118ab895e", size = 7080946, upload-time = "2026-04-01T14:46:11.734Z" }, ] [[package]] name = "platformdirs" -version = "4.3.8" +version = "4.9.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" } +sdist = { url = "https://files.pythonhosted.org/packages/19/56/8d4c30c8a1d07013911a8fdbd8f89440ef9f08d07a1b50ab8ca8be5a20f9/platformdirs-4.9.4.tar.gz", hash = "sha256:1ec356301b7dc906d83f371c8f487070e99d3ccf9e501686456394622a01a934", size = 28737, upload-time = "2026-03-05T18:34:13.271Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, + { url = "https://files.pythonhosted.org/packages/63/d7/97f7e3a6abb67d8080dd406fd4df842c2be0efaf712d1c899c32a075027c/platformdirs-4.9.4-py3-none-any.whl", hash = "sha256:68a9a4619a666ea6439f2ff250c12a853cd1cbd5158d258bd824a7df6be2f868", size = 21216, upload-time = "2026-03-05T18:34:12.172Z" }, ] [[package]] @@ -2994,7 +10843,7 @@ name = 
"portalocker" version = "3.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pywin32", marker = "sys_platform == 'win32' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pywin32", marker = "sys_platform == 'win32' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/5e/77/65b857a69ed876e1951e88aaba60f5ce6120c33703f7cb61a3c894b8c1b6/portalocker-3.2.0.tar.gz", hash = "sha256:1f3002956a54a8c3730586c5c77bf18fae4149e07eaf1c29fc3faf4d5a3f89ac", size = 95644, upload-time = "2025-06-14T13:20:40.03Z" } wheels = [ @@ -3013,33 +10862,111 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" }, ] +[[package]] +name = "propcache" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/0e/934b541323035566a9af292dba85a195f7b78179114f2c6ebb24551118a9/propcache-0.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:7c2d1fa3201efaf55d730400d945b5b3ab6e672e100ba0f9a409d950ab25d7db", size = 79534, upload-time = "2025-10-08T19:46:02.083Z" }, + { url = "https://files.pythonhosted.org/packages/a1/6b/db0d03d96726d995dc7171286c6ba9d8d14251f37433890f88368951a44e/propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8", size = 45526, upload-time = "2025-10-08T19:46:03.884Z" }, + { url = "https://files.pythonhosted.org/packages/e4/c3/82728404aea669e1600f304f2609cde9e665c18df5a11cdd57ed73c1dceb/propcache-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:66c1f011f45a3b33d7bcb22daed4b29c0c9e2224758b6be00686731e1b46f925", size = 47263, upload-time = "2025-10-08T19:46:05.405Z" }, + { url = "https://files.pythonhosted.org/packages/df/1b/39313ddad2bf9187a1432654c38249bab4562ef535ef07f5eb6eb04d0b1b/propcache-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9a52009f2adffe195d0b605c25ec929d26b36ef986ba85244891dee3b294df21", size = 201012, upload-time = "2025-10-08T19:46:07.165Z" }, + { url = "https://files.pythonhosted.org/packages/5b/01/f1d0b57d136f294a142acf97f4ed58c8e5b974c21e543000968357115011/propcache-0.4.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5d4e2366a9c7b837555cf02fb9be2e3167d333aff716332ef1b7c3a142ec40c5", size = 209491, upload-time = "2025-10-08T19:46:08.909Z" }, + { url = "https://files.pythonhosted.org/packages/a1/c8/038d909c61c5bb039070b3fb02ad5cccdb1dde0d714792e251cdb17c9c05/propcache-0.4.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9d2b6caef873b4f09e26ea7e33d65f42b944837563a47a94719cc3544319a0db", size = 215319, upload-time = "2025-10-08T19:46:10.7Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/57/8c87e93142b2c1fa2408e45695205a7ba05fb5db458c0bf5c06ba0e09ea6/propcache-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b16ec437a8c8a965ecf95739448dd938b5c7f56e67ea009f4300d8df05f32b7", size = 196856, upload-time = "2025-10-08T19:46:12.003Z" }, + { url = "https://files.pythonhosted.org/packages/42/df/5615fec76aa561987a534759b3686008a288e73107faa49a8ae5795a9f7a/propcache-0.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:296f4c8ed03ca7476813fe666c9ea97869a8d7aec972618671b33a38a5182ef4", size = 193241, upload-time = "2025-10-08T19:46:13.495Z" }, + { url = "https://files.pythonhosted.org/packages/d5/21/62949eb3a7a54afe8327011c90aca7e03547787a88fb8bd9726806482fea/propcache-0.4.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:1f0978529a418ebd1f49dad413a2b68af33f85d5c5ca5c6ca2a3bed375a7ac60", size = 190552, upload-time = "2025-10-08T19:46:14.938Z" }, + { url = "https://files.pythonhosted.org/packages/30/ee/ab4d727dd70806e5b4de96a798ae7ac6e4d42516f030ee60522474b6b332/propcache-0.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fd138803047fb4c062b1c1dd95462f5209456bfab55c734458f15d11da288f8f", size = 200113, upload-time = "2025-10-08T19:46:16.695Z" }, + { url = "https://files.pythonhosted.org/packages/8a/0b/38b46208e6711b016aa8966a3ac793eee0d05c7159d8342aa27fc0bc365e/propcache-0.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8c9b3cbe4584636d72ff556d9036e0c9317fa27b3ac1f0f558e7e84d1c9c5900", size = 200778, upload-time = "2025-10-08T19:46:18.023Z" }, + { url = "https://files.pythonhosted.org/packages/cf/81/5abec54355ed344476bee711e9f04815d4b00a311ab0535599204eecc257/propcache-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f93243fdc5657247533273ac4f86ae106cc6445a0efacb9a1bfe982fcfefd90c", size = 193047, upload-time = "2025-10-08T19:46:19.449Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/b6/1f237c04e32063cb034acd5f6ef34ef3a394f75502e72703545631ab1ef6/propcache-0.4.1-cp310-cp310-win32.whl", hash = "sha256:a0ee98db9c5f80785b266eb805016e36058ac72c51a064040f2bc43b61101cdb", size = 38093, upload-time = "2025-10-08T19:46:20.643Z" }, + { url = "https://files.pythonhosted.org/packages/a6/67/354aac4e0603a15f76439caf0427781bcd6797f370377f75a642133bc954/propcache-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:1cdb7988c4e5ac7f6d175a28a9aa0c94cb6f2ebe52756a3c0cda98d2809a9e37", size = 41638, upload-time = "2025-10-08T19:46:21.935Z" }, + { url = "https://files.pythonhosted.org/packages/e0/e1/74e55b9fd1a4c209ff1a9a824bf6c8b3d1fc5a1ac3eabe23462637466785/propcache-0.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:d82ad62b19645419fe79dd63b3f9253e15b30e955c0170e5cebc350c1844e581", size = 38229, upload-time = "2025-10-08T19:46:23.368Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929, upload-time = "2025-10-08T19:46:28.62Z" }, + { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" }, + { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" }, + { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 210030, upload-time = "2025-10-08T19:46:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size 
= 202064, upload-time = "2025-10-08T19:46:36.993Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, upload-time = "2025-10-08T19:46:38.398Z" }, + { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, + { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = 
"2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, +] + +[[package]] +name = "properdocs" +version = "1.6.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "colorama", marker = "sys_platform == 'win32' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "ghp-import" }, + { name = "jinja2" }, + { name = "markdown" }, + { name = "markupsafe" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "platformdirs" }, + { name = "pyyaml" }, + { name = "pyyaml-env-tag" }, + { name = "watchdog" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ec/29/f27a4e1eddf72ed3db6e47818fbafe6debbf09fd7051f9c1a007239b46ef/properdocs-1.6.7.tar.gz", hash = "sha256:adc7b16e562890af0e098a7e5b02e3a81c20894a87d6a28d345c9300de73c26e", size = 276141, upload-time = "2026-03-20T20:07:48.167Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/4d/fc923f5c85318ee8cc903566dc4e0ebe41b2dfc1d2ecf5546db232397ed6/properdocs-1.6.7-py3-none-any.whl", hash = "sha256:6fa0cfa2e01bf338f684892c8a506cf70ea88ae7f3479c933b6fa20168101cbd", size = 225406, upload-time = "2026-03-20T20:07:46.875Z" }, +] + [[package]] name = "protobuf" -version = "4.25.8" +version = "4.25.9" source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/df/01/34c8d2b6354906d728703cb9d546a0e534de479e25f1b581e4094c4a85cc/protobuf-4.25.8.tar.gz", hash = "sha256:6135cf8affe1fc6f76cced2641e4ea8d3e59518d1f24ae41ba97bcad82d397cd", size = 380920, upload-time = "2025-05-28T14:22:25.153Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d9/8e/d08c41a8c004e1d437ef467e7c4f9c3295cd784eba48ed5d1d01f94b1dad/protobuf-4.25.9.tar.gz", hash = "sha256:b0dc7e7c68de8b1ce831dacb12fb407e838edbb8b6cc0dc3a2a6b4cbf6de9cff", size = 381040, upload-time = "2026-03-25T23:09:36.423Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/ff/05f34305fe6b85bbfbecbc559d423a5985605cad5eda4f47eae9e9c9c5c5/protobuf-4.25.8-cp310-abi3-win32.whl", hash = "sha256:504435d831565f7cfac9f0714440028907f1975e4bed228e58e72ecfff58a1e0", size = 392745, upload-time = "2025-05-28T14:22:10.524Z" }, - { url = "https://files.pythonhosted.org/packages/08/35/8b8a8405c564caf4ba835b1fdf554da869954712b26d8f2a98c0e434469b/protobuf-4.25.8-cp310-abi3-win_amd64.whl", hash = "sha256:bd551eb1fe1d7e92c1af1d75bdfa572eff1ab0e5bf1736716814cdccdb2360f9", size = 413736, upload-time = "2025-05-28T14:22:13.156Z" }, - { url = "https://files.pythonhosted.org/packages/28/d7/ab27049a035b258dab43445eb6ec84a26277b16105b277cbe0a7698bdc6c/protobuf-4.25.8-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ca809b42f4444f144f2115c4c1a747b9a404d590f18f37e9402422033e464e0f", size = 394537, upload-time = "2025-05-28T14:22:14.768Z" }, - { url = "https://files.pythonhosted.org/packages/bd/6d/a4a198b61808dd3d1ee187082ccc21499bc949d639feb948961b48be9a7e/protobuf-4.25.8-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:9ad7ef62d92baf5a8654fbb88dac7fa5594cfa70fd3440488a5ca3bfc6d795a7", size = 294005, upload-time = "2025-05-28T14:22:16.052Z" }, - { url = 
"https://files.pythonhosted.org/packages/d6/c6/c9deaa6e789b6fc41b88ccbdfe7a42d2b82663248b715f55aa77fbc00724/protobuf-4.25.8-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:83e6e54e93d2b696a92cad6e6efc924f3850f82b52e1563778dfab8b355101b0", size = 294924, upload-time = "2025-05-28T14:22:17.105Z" }, - { url = "https://files.pythonhosted.org/packages/0c/c1/6aece0ab5209981a70cd186f164c133fdba2f51e124ff92b73de7fd24d78/protobuf-4.25.8-py3-none-any.whl", hash = "sha256:15a0af558aa3b13efef102ae6e4f3efac06f1eea11afb3a57db2901447d9fb59", size = 156757, upload-time = "2025-05-28T14:22:24.135Z" }, + { url = "https://files.pythonhosted.org/packages/a8/e9/59435bd04bdd46cb38c42a336b22f9843e8e586ff83c35a5423f8b14704e/protobuf-4.25.9-cp310-abi3-win32.whl", hash = "sha256:bde396f568b0b46fc8fbfe9f02facf25b6755b2578a3b8ac61e74b9d69499e03", size = 392879, upload-time = "2026-03-25T23:09:21.32Z" }, + { url = "https://files.pythonhosted.org/packages/f3/16/42a5c7f1001783d2b5bfcecde10127f09010f78982c86ae409122ce3ece6/protobuf-4.25.9-cp310-abi3-win_amd64.whl", hash = "sha256:3683c05154252206f7cb2d371626514b3708199d9bcf683b503dabf3a2e38e06", size = 413900, upload-time = "2026-03-25T23:09:23.589Z" }, + { url = "https://files.pythonhosted.org/packages/56/5b/0074a0a9eb01f3d1c4648ca5e81b22090c811b210b61df9018ac6d6c5cda/protobuf-4.25.9-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:9560813560e6ee72c11ca8873878bdb7ee003c96a57ebb013245fe84e2540904", size = 394826, upload-time = "2026-03-25T23:09:25.194Z" }, + { url = "https://files.pythonhosted.org/packages/54/aa/b2dba856f64c36b2a06c67be1472de98cca07a2322d0f0cbf03279a40e5b/protobuf-4.25.9-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:999146ef02e7fa6a692477badd1528bcd7268df211852a3df2d834ba2b480791", size = 294191, upload-time = "2026-03-25T23:09:26.613Z" }, + { url = "https://files.pythonhosted.org/packages/a8/5c/53f18822017b8bda6bd8bb4e02048e911fdc79a3dafdc83ab994fe922a84/protobuf-4.25.9-cp37-abi3-manylinux2014_x86_64.whl", hash = 
"sha256:438c636de8fb706a0de94a12a268ef1ae8f5ba5ae655a7671fcda5968ba3c9be", size = 295178, upload-time = "2026-03-25T23:09:27.839Z" }, + { url = "https://files.pythonhosted.org/packages/16/28/d5065b212685875d3924bcdb3201cbf467cb4d58a18aa19a8dfd99ea80a9/protobuf-4.25.9-py3-none-any.whl", hash = "sha256:d49b615e7c935194ac161f0965699ac84df6112c378e05ec53da65d2e4cbb6d4", size = 156822, upload-time = "2026-03-25T23:09:34.957Z" }, ] [[package]] name = "psutil" -version = "7.0.0" +version = "7.2.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003, upload-time = "2025-02-13T21:54:07.946Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/c6/d1ddf4abb55e93cebc4f2ed8b5d6dbad109ecb8d63748dd2b20ab5e57ebe/psutil-7.2.2.tar.gz", hash = "sha256:0746f5f8d406af344fd547f1c8daa5f5c33dbc293bb8d6a16d80b4bb88f59372", size = 493740, upload-time = "2026-01-28T18:14:54.428Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051, upload-time = "2025-02-13T21:54:12.36Z" }, - { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535, upload-time = "2025-02-13T21:54:16.07Z" }, - { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004, upload-time = "2025-02-13T21:54:18.662Z" }, - { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986, upload-time = "2025-02-13T21:54:21.811Z" }, - { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544, upload-time = "2025-02-13T21:54:24.68Z" }, - { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053, upload-time = "2025-02-13T21:54:34.31Z" }, - { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885, upload-time = "2025-02-13T21:54:37.486Z" }, + { url = "https://files.pythonhosted.org/packages/e7/36/5ee6e05c9bd427237b11b3937ad82bb8ad2752d72c6969314590dd0c2f6e/psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ed0cace939114f62738d808fdcecd4c869222507e266e574799e9c0faa17d486", size = 129090, upload-time = "2026-01-28T18:15:22.168Z" }, + { url = "https://files.pythonhosted.org/packages/80/c4/f5af4c1ca8c1eeb2e92ccca14ce8effdeec651d5ab6053c589b074eda6e1/psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:1a7b04c10f32cc88ab39cbf606e117fd74721c831c98a27dc04578deb0c16979", size = 129859, 
upload-time = "2026-01-28T18:15:23.795Z" }, + { url = "https://files.pythonhosted.org/packages/b5/70/5d8df3b09e25bce090399cf48e452d25c935ab72dad19406c77f4e828045/psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:076a2d2f923fd4821644f5ba89f059523da90dc9014e85f8e45a5774ca5bc6f9", size = 155560, upload-time = "2026-01-28T18:15:25.976Z" }, + { url = "https://files.pythonhosted.org/packages/63/65/37648c0c158dc222aba51c089eb3bdfa238e621674dc42d48706e639204f/psutil-7.2.2-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0726cecd84f9474419d67252add4ac0cd9811b04d61123054b9fb6f57df6e9e", size = 156997, upload-time = "2026-01-28T18:15:27.794Z" }, + { url = "https://files.pythonhosted.org/packages/8e/13/125093eadae863ce03c6ffdbae9929430d116a246ef69866dad94da3bfbc/psutil-7.2.2-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fd04ef36b4a6d599bbdb225dd1d3f51e00105f6d48a28f006da7f9822f2606d8", size = 148972, upload-time = "2026-01-28T18:15:29.342Z" }, + { url = "https://files.pythonhosted.org/packages/04/78/0acd37ca84ce3ddffaa92ef0f571e073faa6d8ff1f0559ab1272188ea2be/psutil-7.2.2-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b58fabe35e80b264a4e3bb23e6b96f9e45a3df7fb7eed419ac0e5947c61e47cc", size = 148266, upload-time = "2026-01-28T18:15:31.597Z" }, + { url = "https://files.pythonhosted.org/packages/b4/90/e2159492b5426be0c1fef7acba807a03511f97c5f86b3caeda6ad92351a7/psutil-7.2.2-cp37-abi3-win_amd64.whl", hash = "sha256:eb7e81434c8d223ec4a219b5fc1c47d0417b12be7ea866e24fb5ad6e84b3d988", size = 137737, upload-time = "2026-01-28T18:15:33.849Z" }, + { url = "https://files.pythonhosted.org/packages/8c/c7/7bb2e321574b10df20cbde462a94e2b71d05f9bbda251ef27d104668306a/psutil-7.2.2-cp37-abi3-win_arm64.whl", hash = "sha256:8c233660f575a5a89e6d4cb65d9f938126312bca76d8fe087b947b3a1aaac9ee", size = 134617, upload-time = "2026-01-28T18:15:36.514Z" }, ] [[package]] @@ -3060,6 
+10987,35 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" }, ] +[[package]] +name = "pyarrow" +version = "23.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/88/22/134986a4cc224d593c1afde5494d18ff629393d74cc2eddb176669f234a4/pyarrow-23.0.1.tar.gz", hash = "sha256:b8c5873e33440b2bc2f4a79d2b47017a89c5a24116c055625e6f2ee50523f019", size = 1167336, upload-time = "2026-02-16T10:14:12.39Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/a8/24e5dc6855f50a62936ceb004e6e9645e4219a8065f304145d7fb8a79d5d/pyarrow-23.0.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:3fab8f82571844eb3c460f90a75583801d14ca0cc32b1acc8c361650e006fd56", size = 34307390, upload-time = "2026-02-16T10:08:08.654Z" }, + { url = "https://files.pythonhosted.org/packages/bc/8e/4be5617b4aaae0287f621ad31c6036e5f63118cfca0dc57d42121ff49b51/pyarrow-23.0.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:3f91c038b95f71ddfc865f11d5876c42f343b4495535bd262c7b321b0b94507c", size = 35853761, upload-time = "2026-02-16T10:08:17.811Z" }, + { url = "https://files.pythonhosted.org/packages/2e/08/3e56a18819462210432ae37d10f5c8eed3828be1d6c751b6e6a2e93c286a/pyarrow-23.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:d0744403adabef53c985a7f8a082b502a368510c40d184df349a0a8754533258", size = 44493116, upload-time = "2026-02-16T10:08:25.792Z" }, + { url = "https://files.pythonhosted.org/packages/f8/82/c40b68001dbec8a3faa4c08cd8c200798ac732d2854537c5449dc859f55a/pyarrow-23.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c33b5bf406284fd0bba436ed6f6c3ebe8e311722b441d89397c54f871c6863a2", size = 47564532, upload-time = "2026-02-16T10:08:34.27Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/bc/73f611989116b6f53347581b02177f9f620efdf3cd3f405d0e83cdf53a83/pyarrow-23.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ddf743e82f69dcd6dbbcb63628895d7161e04e56794ef80550ac6f3315eeb1d5", size = 48183685, upload-time = "2026-02-16T10:08:42.889Z" }, + { url = "https://files.pythonhosted.org/packages/b0/cc/6c6b3ecdae2a8c3aced99956187e8302fc954cc2cca2a37cf2111dad16ce/pyarrow-23.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e052a211c5ac9848ae15d5ec875ed0943c0221e2fcfe69eee80b604b4e703222", size = 50605582, upload-time = "2026-02-16T10:08:51.641Z" }, + { url = "https://files.pythonhosted.org/packages/8d/94/d359e708672878d7638a04a0448edf7c707f9e5606cee11e15aaa5c7535a/pyarrow-23.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:5abde149bb3ce524782d838eb67ac095cd3fd6090eba051130589793f1a7f76d", size = 27521148, upload-time = "2026-02-16T10:08:58.077Z" }, + { url = "https://files.pythonhosted.org/packages/b0/41/8e6b6ef7e225d4ceead8459427a52afdc23379768f54dd3566014d7618c1/pyarrow-23.0.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6f0147ee9e0386f519c952cc670eb4a8b05caa594eeffe01af0e25f699e4e9bb", size = 34302230, upload-time = "2026-02-16T10:09:03.859Z" }, + { url = "https://files.pythonhosted.org/packages/bf/4a/1472c00392f521fea03ae93408bf445cc7bfa1ab81683faf9bc188e36629/pyarrow-23.0.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:0ae6e17c828455b6265d590100c295193f93cc5675eb0af59e49dbd00d2de350", size = 35850050, upload-time = "2026-02-16T10:09:11.877Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b2/bd1f2f05ded56af7f54d702c8364c9c43cd6abb91b0e9933f3d77b4f4132/pyarrow-23.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:fed7020203e9ef273360b9e45be52a2a47d3103caf156a30ace5247ffb51bdbd", size = 44491918, upload-time = "2026-02-16T10:09:18.144Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/62/96459ef5b67957eac38a90f541d1c28833d1b367f014a482cb63f3b7cd2d/pyarrow-23.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:26d50dee49d741ac0e82185033488d28d35be4d763ae6f321f97d1140eb7a0e9", size = 47562811, upload-time = "2026-02-16T10:09:25.792Z" }, + { url = "https://files.pythonhosted.org/packages/7d/94/1170e235add1f5f45a954e26cd0e906e7e74e23392dcb560de471f7366ec/pyarrow-23.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3c30143b17161310f151f4a2bcfe41b5ff744238c1039338779424e38579d701", size = 48183766, upload-time = "2026-02-16T10:09:34.645Z" }, + { url = "https://files.pythonhosted.org/packages/0e/2d/39a42af4570377b99774cdb47f63ee6c7da7616bd55b3d5001aa18edfe4f/pyarrow-23.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db2190fa79c80a23fdd29fef4b8992893f024ae7c17d2f5f4db7171fa30c2c78", size = 50607669, upload-time = "2026-02-16T10:09:44.153Z" }, + { url = "https://files.pythonhosted.org/packages/00/ca/db94101c187f3df742133ac837e93b1f269ebdac49427f8310ee40b6a58f/pyarrow-23.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:f00f993a8179e0e1c9713bcc0baf6d6c01326a406a9c23495ec1ba9c9ebf2919", size = 27527698, upload-time = "2026-02-16T10:09:50.263Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4b/4166bb5abbfe6f750fc60ad337c43ecf61340fa52ab386da6e8dbf9e63c4/pyarrow-23.0.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:f4b0dbfa124c0bb161f8b5ebb40f1a680b70279aa0c9901d44a2b5a20806039f", size = 34214575, upload-time = "2026-02-16T10:09:56.225Z" }, + { url = "https://files.pythonhosted.org/packages/e1/da/3f941e3734ac8088ea588b53e860baeddac8323ea40ce22e3d0baa865cc9/pyarrow-23.0.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:7707d2b6673f7de054e2e83d59f9e805939038eebe1763fe811ee8fa5c0cd1a7", size = 35832540, upload-time = "2026-02-16T10:10:03.428Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/7c/3d841c366620e906d54430817531b877ba646310296df42ef697308c2705/pyarrow-23.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:86ff03fb9f1a320266e0de855dee4b17da6794c595d207f89bba40d16b5c78b9", size = 44470940, upload-time = "2026-02-16T10:10:10.704Z" }, + { url = "https://files.pythonhosted.org/packages/2c/a5/da83046273d990f256cb79796a190bbf7ec999269705ddc609403f8c6b06/pyarrow-23.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:813d99f31275919c383aab17f0f455a04f5a429c261cc411b1e9a8f5e4aaaa05", size = 47586063, upload-time = "2026-02-16T10:10:17.95Z" }, + { url = "https://files.pythonhosted.org/packages/5b/3c/b7d2ebcff47a514f47f9da1e74b7949138c58cfeb108cdd4ee62f43f0cf3/pyarrow-23.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bf5842f960cddd2ef757d486041d57c96483efc295a8c4a0e20e704cbbf39c67", size = 48173045, upload-time = "2026-02-16T10:10:25.363Z" }, + { url = "https://files.pythonhosted.org/packages/43/b2/b40961262213beaba6acfc88698eb773dfce32ecdf34d19291db94c2bd73/pyarrow-23.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564baf97c858ecc03ec01a41062e8f4698abc3e6e2acd79c01c2e97880a19730", size = 50621741, upload-time = "2026-02-16T10:10:33.477Z" }, + { url = "https://files.pythonhosted.org/packages/f6/70/1fdda42d65b28b078e93d75d371b2185a61da89dda4def8ba6ba41ebdeb4/pyarrow-23.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:07deae7783782ac7250989a7b2ecde9b3c343a643f82e8a4df03d93b633006f0", size = 27620678, upload-time = "2026-02-16T10:10:39.31Z" }, +] + [[package]] name = "pybase64" version = "1.0.2" @@ -3068,85 +11024,88 @@ sdist = { url = "https://files.pythonhosted.org/packages/38/b8/1732027d79ac822f1 [[package]] name = "pyclipper" -version = "1.3.0.post6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4a/b2/550fe500e49c464d73fabcb8cb04d47e4885d6ca4cfc1f5b0a125a95b19a/pyclipper-1.3.0.post6.tar.gz", hash = 
"sha256:42bff0102fa7a7f2abdd795a2594654d62b786d0c6cd67b72d469114fdeb608c", size = 165909, upload-time = "2024-10-18T12:23:09.069Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/34/0dca299fe41e9a92e78735502fed5238a4ac734755e624488df9b2eeec46/pyclipper-1.3.0.post6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fa0f5e78cfa8262277bb3d0225537b3c2a90ef68fd90a229d5d24cf49955dcf4", size = 269504, upload-time = "2024-10-18T12:21:55.735Z" }, - { url = "https://files.pythonhosted.org/packages/8a/5b/81528b08134b3c2abdfae821e1eff975c0703802d41974b02dfb2e101c55/pyclipper-1.3.0.post6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a01f182d8938c1dc515e8508ed2442f7eebd2c25c7d5cb29281f583c1a8008a4", size = 142599, upload-time = "2024-10-18T12:21:57.401Z" }, - { url = "https://files.pythonhosted.org/packages/84/a4/3e304f6c0d000382cd54d4a1e5f0d8fc28e1ae97413a2ec1016a7b840319/pyclipper-1.3.0.post6-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:640f20975727994d4abacd07396f564e9e5665ba5cb66ceb36b300c281f84fa4", size = 912209, upload-time = "2024-10-18T12:21:59.408Z" }, - { url = "https://files.pythonhosted.org/packages/f5/6a/28ec55cc3f972368b211fca017e081cf5a71009d1b8ec3559767cda5b289/pyclipper-1.3.0.post6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a63002f6bb0f1efa87c0b81634cbb571066f237067e23707dabf746306c92ba5", size = 929511, upload-time = "2024-10-18T12:22:01.454Z" }, - { url = "https://files.pythonhosted.org/packages/c4/56/c326f3454c5f30a31f58a5c3154d891fce58ad73ccbf1d3f4aacfcbd344d/pyclipper-1.3.0.post6-cp310-cp310-win32.whl", hash = "sha256:106b8622cd9fb07d80cbf9b1d752334c55839203bae962376a8c59087788af26", size = 100126, upload-time = "2024-10-18T12:22:02.83Z" }, - { url = "https://files.pythonhosted.org/packages/f8/e6/f8239af6346848b20a3448c554782fe59298ab06c1d040490242dc7e3c26/pyclipper-1.3.0.post6-cp310-cp310-win_amd64.whl", hash = 
"sha256:9699e98862dadefd0bea2360c31fa61ca553c660cbf6fb44993acde1b959f58f", size = 110470, upload-time = "2024-10-18T12:22:04.411Z" }, - { url = "https://files.pythonhosted.org/packages/50/a9/66ca5f252dcac93ca076698591b838ba17f9729591edf4b74fef7fbe1414/pyclipper-1.3.0.post6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4247e7c44b34c87acbf38f99d48fb1acaf5da4a2cf4dcd601a9b24d431be4ef", size = 270930, upload-time = "2024-10-18T12:22:06.066Z" }, - { url = "https://files.pythonhosted.org/packages/59/fe/2ab5818b3504e179086e54a37ecc245525d069267b8c31b18ec3d0830cbf/pyclipper-1.3.0.post6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:851b3e58106c62a5534a1201295fe20c21714dee2eda68081b37ddb0367e6caa", size = 143411, upload-time = "2024-10-18T12:22:07.598Z" }, - { url = "https://files.pythonhosted.org/packages/09/f7/b58794f643e033a6d14da7c70f517315c3072f3c5fccdf4232fa8c8090c1/pyclipper-1.3.0.post6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16cc1705a915896d2aff52131c427df02265631279eac849ebda766432714cc0", size = 951754, upload-time = "2024-10-18T12:22:08.966Z" }, - { url = "https://files.pythonhosted.org/packages/c1/77/846a21957cd4ed266c36705ee340beaa923eb57d2bba013cfd7a5c417cfd/pyclipper-1.3.0.post6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace1f0753cf71c5c5f6488b8feef5dd0fa8b976ad86b24bb51f708f513df4aac", size = 969608, upload-time = "2024-10-18T12:22:10.321Z" }, - { url = "https://files.pythonhosted.org/packages/c9/2b/580703daa6606d160caf596522d4cfdf62ae619b062a7ce6f905821a57e8/pyclipper-1.3.0.post6-cp311-cp311-win32.whl", hash = "sha256:dbc828641667142751b1127fd5c4291663490cf05689c85be4c5bcc89aaa236a", size = 100227, upload-time = "2024-10-18T12:22:11.991Z" }, - { url = "https://files.pythonhosted.org/packages/17/4b/a4cda18e8556d913ff75052585eb0d658500596b5f97fe8401d05123d47b/pyclipper-1.3.0.post6-cp311-cp311-win_amd64.whl", hash = 
"sha256:1c03f1ae43b18ee07730c3c774cc3cf88a10c12a4b097239b33365ec24a0a14a", size = 110442, upload-time = "2024-10-18T12:22:13.121Z" }, - { url = "https://files.pythonhosted.org/packages/fc/c8/197d9a1d8354922d24d11d22fb2e0cc1ebc182f8a30496b7ddbe89467ce1/pyclipper-1.3.0.post6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:6363b9d79ba1b5d8f32d1623e797c1e9f994600943402e68d5266067bdde173e", size = 270487, upload-time = "2024-10-18T12:22:14.852Z" }, - { url = "https://files.pythonhosted.org/packages/8e/8e/eb14eadf054494ad81446e21c4ea163b941747610b0eb9051644395f567e/pyclipper-1.3.0.post6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:32cd7fb9c1c893eb87f82a072dbb5e26224ea7cebbad9dc306d67e1ac62dd229", size = 143469, upload-time = "2024-10-18T12:22:16.109Z" }, - { url = "https://files.pythonhosted.org/packages/cf/e5/6c4a8df6e904c133bb4c5309d211d31c751db60cbd36a7250c02b05494a1/pyclipper-1.3.0.post6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3aab10e3c10ed8fa60c608fb87c040089b83325c937f98f06450cf9fcfdaf1d", size = 944206, upload-time = "2024-10-18T12:22:17.216Z" }, - { url = "https://files.pythonhosted.org/packages/76/65/cb014acc41cd5bf6bbfa4671c7faffffb9cee01706642c2dec70c5209ac8/pyclipper-1.3.0.post6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58eae2ff92a8cae1331568df076c4c5775bf946afab0068b217f0cf8e188eb3c", size = 963797, upload-time = "2024-10-18T12:22:18.881Z" }, - { url = "https://files.pythonhosted.org/packages/80/ec/b40cd81ab7598984167508a5369a2fa31a09fe3b3e3d0b73aa50e06d4b3f/pyclipper-1.3.0.post6-cp312-cp312-win32.whl", hash = "sha256:793b0aa54b914257aa7dc76b793dd4dcfb3c84011d48df7e41ba02b571616eaf", size = 99456, upload-time = "2024-10-18T12:22:20.084Z" }, - { url = "https://files.pythonhosted.org/packages/24/3a/7d6292e3c94fb6b872d8d7e80d909dc527ee6b0af73b753c63fdde65a7da/pyclipper-1.3.0.post6-cp312-cp312-win_amd64.whl", hash = 
"sha256:d3f9da96f83b8892504923beb21a481cd4516c19be1d39eb57a92ef1c9a29548", size = 110278, upload-time = "2024-10-18T12:22:21.178Z" }, +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/21/3c06205bb407e1f79b73b7b4dfb3950bd9537c4f625a68ab5cc41177f5bc/pyclipper-1.4.0.tar.gz", hash = "sha256:9882bd889f27da78add4dd6f881d25697efc740bf840274e749988d25496c8e1", size = 54489, upload-time = "2025-12-01T13:15:35.015Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/9f/a10173d32ecc2ce19a04d018163f3ca22a04c0c6ad03b464dcd32f9152a8/pyclipper-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bafad70d2679c187120e8c44e1f9a8b06150bad8c0aecf612ad7dfbfa9510f73", size = 264510, upload-time = "2025-12-01T13:14:46.551Z" }, + { url = "https://files.pythonhosted.org/packages/e0/c2/5490ddc4a1f7ceeaa0258f4266397e720c02db515b2ca5bc69b85676f697/pyclipper-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0b74a9dd44b22a7fd35d65fb1ceeba57f3817f34a97a28c3255556362e491447", size = 139498, upload-time = "2025-12-01T13:14:48.31Z" }, + { url = "https://files.pythonhosted.org/packages/3b/0a/bea9102d1d75634b1a5702b0e92982451a1eafca73c4845d3dbe27eba13d/pyclipper-1.4.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a4d2736fb3c42e8eb1d38bf27a720d1015526c11e476bded55138a977c17d9d", size = 970974, upload-time = "2025-12-01T13:14:49.799Z" }, + { url = "https://files.pythonhosted.org/packages/8b/1b/097f8776d5b3a10eb7b443b632221f4ed825d892e79e05682f4b10a1a59c/pyclipper-1.4.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3b3630051b53ad2564cb079e088b112dd576e3d91038338ad1cc7915e0f14dc", size = 943315, upload-time = "2025-12-01T13:14:51.266Z" }, + { url = "https://files.pythonhosted.org/packages/fd/4d/17d6a3f1abf0f368d58f2309e80ee3761afb1fd1342f7780ab32ba4f0b1d/pyclipper-1.4.0-cp310-cp310-win32.whl", hash = 
"sha256:8d42b07a2f6cfe2d9b87daf345443583f00a14e856927782fde52f3a255e305a", size = 95286, upload-time = "2025-12-01T13:14:52.922Z" }, + { url = "https://files.pythonhosted.org/packages/53/ca/b30138427ed122ec9b47980b943164974a2ec606fa3f71597033b9a9f9a6/pyclipper-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:6a97b961f182b92d899ca88c1bb3632faea2e00ce18d07c5f789666ebb021ca4", size = 104227, upload-time = "2025-12-01T13:14:54.013Z" }, + { url = "https://files.pythonhosted.org/packages/de/e3/64cf7794319b088c288706087141e53ac259c7959728303276d18adc665d/pyclipper-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:adcb7ca33c5bdc33cd775e8b3eadad54873c802a6d909067a57348bcb96e7a2d", size = 264281, upload-time = "2025-12-01T13:14:55.47Z" }, + { url = "https://files.pythonhosted.org/packages/34/cd/44ec0da0306fa4231e76f1c2cb1fa394d7bde8db490a2b24d55b39865f69/pyclipper-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd24849d2b94ec749ceac7c34c9f01010d23b6e9d9216cf2238b8481160e703d", size = 139426, upload-time = "2025-12-01T13:14:56.683Z" }, + { url = "https://files.pythonhosted.org/packages/ad/88/d8f6c6763ea622fe35e19c75d8b39ed6c55191ddc82d65e06bc46b26cb8e/pyclipper-1.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1b6c8d75ba20c6433c9ea8f1a0feb7e4d3ac06a09ad1fd6d571afc1ddf89b869", size = 989649, upload-time = "2025-12-01T13:14:58.28Z" }, + { url = "https://files.pythonhosted.org/packages/ff/e9/ea7d68c8c4af3842d6515bedcf06418610ad75f111e64c92c1d4785a1513/pyclipper-1.4.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:58e29d7443d7cc0e83ee9daf43927730386629786d00c63b04fe3b53ac01462c", size = 962842, upload-time = "2025-12-01T13:15:00.044Z" }, + { url = "https://files.pythonhosted.org/packages/4e/b7/0b4a272d8726e51ab05e2b933d8cc47f29757fb8212e38b619e170e6015c/pyclipper-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a8d2b5fb75ebe57e21ce61e79a9131edec2622ff23cc665e4d1d1f201bc1a801", size = 95098, 
upload-time = "2025-12-01T13:15:01.359Z" }, + { url = "https://files.pythonhosted.org/packages/3a/76/4901de2919198bb2bd3d989f86d4a1dff363962425bb2d63e24e6c990042/pyclipper-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:e9b973467d9c5fa9bc30bb6ac95f9f4d7c3d9fc25f6cf2d1cc972088e5955c01", size = 104362, upload-time = "2025-12-01T13:15:02.439Z" }, + { url = "https://files.pythonhosted.org/packages/90/1b/7a07b68e0842324d46c03e512d8eefa9cb92ba2a792b3b4ebf939dafcac3/pyclipper-1.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:222ac96c8b8281b53d695b9c4fedc674f56d6d4320ad23f1bdbd168f4e316140", size = 265676, upload-time = "2025-12-01T13:15:04.15Z" }, + { url = "https://files.pythonhosted.org/packages/6b/dd/8bd622521c05d04963420ae6664093f154343ed044c53ea260a310c8bb4d/pyclipper-1.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f3672dbafbb458f1b96e1ee3e610d174acb5ace5bd2ed5d1252603bb797f2fc6", size = 140458, upload-time = "2025-12-01T13:15:05.76Z" }, + { url = "https://files.pythonhosted.org/packages/7a/06/6e3e241882bf7d6ab23d9c69ba4e85f1ec47397cbbeee948a16cf75e21ed/pyclipper-1.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d1f807e2b4760a8e5c6d6b4e8c1d71ef52b7fe1946ff088f4fa41e16a881a5ca", size = 978235, upload-time = "2025-12-01T13:15:06.993Z" }, + { url = "https://files.pythonhosted.org/packages/cf/f4/3418c1cd5eea640a9fa2501d4bc0b3655fa8d40145d1a4f484b987990a75/pyclipper-1.4.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce1f83c9a4e10ea3de1959f0ae79e9a5bd41346dff648fee6228ba9eaf8b3872", size = 961388, upload-time = "2025-12-01T13:15:08.467Z" }, + { url = "https://files.pythonhosted.org/packages/ac/94/c85401d24be634af529c962dd5d781f3cb62a67cd769534df2cb3feee97a/pyclipper-1.4.0-cp312-cp312-win32.whl", hash = "sha256:3ef44b64666ebf1cb521a08a60c3e639d21b8c50bfbe846ba7c52a0415e936f4", size = 95169, upload-time = "2025-12-01T13:15:10.098Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/77/dfea08e3b230b82ee22543c30c35d33d42f846a77f96caf7c504dd54fab1/pyclipper-1.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:d1e5498d883b706a4ce636247f0d830c6eb34a25b843a1b78e2c969754ca9037", size = 104619, upload-time = "2025-12-01T13:15:11.592Z" }, + { url = "https://files.pythonhosted.org/packages/18/59/81050abdc9e5b90ffc2c765738c5e40e9abd8e44864aaa737b600f16c562/pyclipper-1.4.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98b2a40f98e1fc1b29e8a6094072e7e0c7dfe901e573bf6cfc6eb7ce84a7ae87", size = 126495, upload-time = "2025-12-01T13:15:33.743Z" }, ] [[package]] name = "pycocotools" -version = "2.0.10" +version = "2.0.11" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/35/a6/694fd661f0feb5e91f7049a202ea12de312ca9010c33bd9d9f0c63046c01/pycocotools-2.0.10.tar.gz", hash = "sha256:7a47609cdefc95e5e151313c7d93a61cf06e15d42c7ba99b601e3bc0f9ece2e1", size = 25389, upload-time = "2025-06-04T23:37:47.879Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/f8/24082061458ad62df7e2714a631cc047eddfe752970a2e4a7e7977d96905/pycocotools-2.0.10-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:94d558e6a4b92620dad1684b74b6c1404e20d5ed3b4f3aed64ad817d5dd46c72", size = 152202, upload-time = "2025-06-04T23:36:50.026Z" }, - { url = "https://files.pythonhosted.org/packages/fe/45/65819da7579e9018506ed3b5401146a394e89eee84f57592174962f0fba2/pycocotools-2.0.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4d61959f505f1333afd1666ece1a9f8dad318de160c56c7d03f22d7b5556478", size = 445796, upload-time = "2025-06-04T23:36:52.057Z" }, - { url = "https://files.pythonhosted.org/packages/61/d7/32996d713921c504875a4cebf241c182aa37e58daab5c3c4737f539ac0d4/pycocotools-2.0.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0bb54826c5d3b651597ec15ae5f4226b727159ec7798af81aa3895f734518993", size = 455015, upload-time = "2025-06-04T23:36:53.93Z" }, - { url = "https://files.pythonhosted.org/packages/fe/5f/91ad9e46ec6709d24a9ed8ac3969f6a550715c08b22f85bc045d1395fdf6/pycocotools-2.0.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9d3b4d0aa38c76153ec244f17939bbc65d24b6a119eb99184f7f636421ef0d8a", size = 464739, upload-time = "2025-06-04T23:36:55.751Z" }, - { url = "https://files.pythonhosted.org/packages/40/e3/9684edbd996a35d8da7c38c1dfc151d6e1bcf66bd32de6fb88f6d2f2bcf5/pycocotools-2.0.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:714dda1fccc3a9a1f10893530df6e927678daf6c49bc8a932d7ec2042e9a11f2", size = 481572, upload-time = "2025-06-04T23:36:57.374Z" }, - { url = "https://files.pythonhosted.org/packages/4e/84/1832144e8effe700660489d6e2a7687c99d14c3ea29fa0142dac0e7322d6/pycocotools-2.0.10-cp310-cp310-win_amd64.whl", hash = "sha256:8b4f26d44dde3e0b1e3df3ddcc7e27560e52dfe53db708c26af22a57e8ea3d47", size = 80166, upload-time = "2025-06-04T23:36:59.275Z" }, - { url = "https://files.pythonhosted.org/packages/03/bf/ea288c16d2d2e4da740545f30f7ebf58f2343bcf5e0a7f3e3aef582a116c/pycocotools-2.0.10-cp310-cp310-win_arm64.whl", hash = "sha256:16836530552d6ce5e7f1cbcdfe6ead94c0cee71d61bfa3e3c832aef57d21c027", size = 69633, upload-time = "2025-06-04T23:37:00.527Z" }, - { url = "https://files.pythonhosted.org/packages/ee/36/aebbbddd9c659f1fc9d78daeaf6e39860813bb014b0de873073361ad40f1/pycocotools-2.0.10-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:68846da0ee3ea82d71bcbd99ed28271633a67a899cfbacd2ef309b2e455524b2", size = 155033, upload-time = "2025-06-04T23:37:01.835Z" }, - { url = "https://files.pythonhosted.org/packages/57/c2/e4c96950604c709fbd71c49828968fadd9d8ca8cf74f52be4cd4b2ff9300/pycocotools-2.0.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20831839a771d4bc60a814e7b54a92d9a45a773dee47959d30888d00066059c3", size = 470328, 
upload-time = "2025-06-04T23:37:03.675Z" }, - { url = "https://files.pythonhosted.org/packages/a7/ec/7827cd9ce6e80f739fab0163ecb3765df54af744a9bab64b0058bdce47ef/pycocotools-2.0.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1760c10459dfb4229e7436ae380228428efb0115bbe332a51b72d07fa085d8c0", size = 477331, upload-time = "2025-06-04T23:37:05.703Z" }, - { url = "https://files.pythonhosted.org/packages/81/74/33ce685ae1cd6312b2526f701e43dfeb73d1c860878b72a30ac1cc322536/pycocotools-2.0.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5146bc881f380e8fb493e49216083298e4a06f778841f8b9b1d45b21e211d0e4", size = 489735, upload-time = "2025-06-04T23:37:08.488Z" }, - { url = "https://files.pythonhosted.org/packages/17/79/0e02ce700ff9c9fd30e57a84add42bd6fc033e743b76870ef68215d3f3f4/pycocotools-2.0.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:23f7d0c551d4c31cab629ce177186db9562f10414320add5267707a84cf6cdfa", size = 507779, upload-time = "2025-06-04T23:37:10.159Z" }, - { url = "https://files.pythonhosted.org/packages/d5/12/00fac39ad26f762c50e5428cc8b3c83de28c5d64b5b858181583522a4e28/pycocotools-2.0.10-cp311-cp311-win_amd64.whl", hash = "sha256:03c3aacec2a6aa5171016303a539d07a7b22a34557456eadf0eb40853bdd813e", size = 80808, upload-time = "2025-06-04T23:37:11.865Z" }, - { url = "https://files.pythonhosted.org/packages/3d/cd/50970a64365f013151086d54d60b40369cf612f117d72cd9d6bd2966932c/pycocotools-2.0.10-cp311-cp311-win_arm64.whl", hash = "sha256:1f942352b1ab11b9732443ab832cbe5836441f4ec30e1f61b44e1421dbb0a0f5", size = 69566, upload-time = "2025-06-04T23:37:13.067Z" }, - { url = "https://files.pythonhosted.org/packages/d7/b4/3b87dce90fc81b8283b2b0e32b22642939e25f3a949581cb6777f5eebb12/pycocotools-2.0.10-cp312-abi3-macosx_10_13_universal2.whl", hash = "sha256:e1359f556986c8c4ac996bf8e473ff891d87630491357aaabd12601687af5edb", size = 142896, upload-time = "2025-06-04T23:37:14.748Z" }, - { url = 
"https://files.pythonhosted.org/packages/29/d5/b17bb67722432a191cb86121cda33cd8edb4d5b15beda43bc97a7d5ae404/pycocotools-2.0.10-cp312-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:075788c90bfa6a8989d628932854f3e32c25dac3c1bf7c1183cefad29aee16c8", size = 390111, upload-time = "2025-06-04T23:37:16.588Z" }, - { url = "https://files.pythonhosted.org/packages/49/80/912b4c60f94e747dd2c3adbda5d4a4edc1d735fbfa0d91ab2eb231decb5d/pycocotools-2.0.10-cp312-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4539d8b29230de042f574012edd0b5227528da083c4f12bbd6488567aabd3920", size = 397099, upload-time = "2025-06-04T23:37:18.105Z" }, - { url = "https://files.pythonhosted.org/packages/df/d7/b3c2f731252a096bbae1a47cb1bbeab4560620a82585d40cce67eca5f043/pycocotools-2.0.10-cp312-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:da7b339624d0f78aa5bdc1c86a53f2dcb36ae7e10ab5fe45ba69878bb7837c7a", size = 396111, upload-time = "2025-06-04T23:37:20.642Z" }, - { url = "https://files.pythonhosted.org/packages/2c/6f/2eceba57245bfc86174263e12716cbe91b329a3677fbeff246148ce6a664/pycocotools-2.0.10-cp312-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ffdbf8810f27b32c5c5c85d9cd65e8e066852fef9775e58a7b23abdffeaf8252", size = 416393, upload-time = "2025-06-04T23:37:22.287Z" }, - { url = "https://files.pythonhosted.org/packages/e1/31/d87f781759b2ad177dd6d41c5fe0ce154f14fc8b384e9b80cd21a157395b/pycocotools-2.0.10-cp312-abi3-win_amd64.whl", hash = "sha256:998a88f90bb663548e767470181175343d406b6673b8b9ef5bdbb3a6d3eb3b11", size = 76824, upload-time = "2025-06-04T23:37:23.744Z" }, - { url = "https://files.pythonhosted.org/packages/27/13/7674d61658b58b8310e3de1270bce18f92a6ee8136e54a7e5696d6f72fd4/pycocotools-2.0.10-cp312-abi3-win_arm64.whl", hash = "sha256:76cd86a80171f8f7da3250be0e40d75084f1f1505d376ae0d08ed0be1ba8a90d", size = 64753, upload-time = "2025-06-04T23:37:25.202Z" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, 
marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or 
(python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or 
(python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a2/df/32354b5dda963ffdfc8f75c9acf8828ef7890723a4ed57bb3ff2dc1d6f7e/pycocotools-2.0.11.tar.gz", hash = "sha256:34254d76da85576fcaf5c1f3aa9aae16b8cb15418334ba4283b800796bd1993d", size = 25381, upload-time = "2025-12-15T22:31:46.148Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/dd/4b/0c040fcda2c4fa4827b1a64e3185d99d5f954e45cc9463ba7385a1173a77/pycocotools-2.0.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:484d33515353186aadba9e2a290d81b107275cdb9565084e31a5568a52a0b120", size = 160351, upload-time = "2025-12-15T22:30:53.998Z" }, + { url = "https://files.pythonhosted.org/packages/49/fe/861db6515824815eaabce27734653a6b100ddb22364b3345dd862b2c5b65/pycocotools-2.0.11-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ca9f120f719ec405ad0c74ccfdb8402b0c37bd5f88ab5b6482a0de2efd5a36f4", size = 463947, upload-time = "2025-12-15T22:30:55.419Z" }, + { url = "https://files.pythonhosted.org/packages/c5/a1/b4b49b85763043372e66baa10dffa42337cf4687d6db22546c27f3a4d732/pycocotools-2.0.11-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e40a3a898c6e5340b8d70cf7984868b9bff8c3d80187de9a3b661d504d665978", size = 472455, upload-time = "2025-12-15T22:30:56.895Z" }, + { url = "https://files.pythonhosted.org/packages/48/70/fac670296e6a2b45eb7434d0480b9af6cb85a8de4f4848b49b01154bc859/pycocotools-2.0.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7cd4cdfd2c676f30838aa0b1047441892fb4f97d70bf3df480bcc7a18a64d7d4", size = 457911, upload-time = "2025-12-15T22:30:58.377Z" }, + { url = "https://files.pythonhosted.org/packages/33/f5/6158de63354dfcb677c8da34a4d205cc532e3277338ab7e6dea1310ba8de/pycocotools-2.0.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:08c79789fd79e801ae4ecfcfeec32b31e36254e7a2b4019af28c104975d5e730", size = 476472, upload-time = "2025-12-15T22:30:59.736Z" }, + { url = "https://files.pythonhosted.org/packages/fc/01/46d2a782cda19ba1beb7c431f417e1e478f0bf1273fa5fe5d10de7c18d76/pycocotools-2.0.11-cp310-cp310-win_amd64.whl", hash = "sha256:f78cbb1a32d061fcad4bdba083de70a39a21c1c3d9235a3f77d8f007541ec5ef", size = 80165, upload-time = "2025-12-15T22:31:00.886Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/5c/6bd945781bb04c2148929183d1d67b05ce07996313b0f87bb88c6a805493/pycocotools-2.0.11-cp310-cp310-win_arm64.whl", hash = "sha256:e21311ea71f85591680d8992858e2d44a2a156dc3b2bf1c5c901c4a19348177b", size = 69358, upload-time = "2025-12-15T22:31:01.815Z" }, + { url = "https://files.pythonhosted.org/packages/b3/3f/41ce3fce61b7721158f21b61727eb054805babc0088cfa48506935b80a36/pycocotools-2.0.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:81bdceebb4c64e9265213e2d733808a12f9c18dfb14457323cc6b9af07fa0e61", size = 158947, upload-time = "2025-12-15T22:31:03.291Z" }, + { url = "https://files.pythonhosted.org/packages/e2/9b/a739705b246445bd1376394bf9d1ec2dd292b16740e92f203461b2bb12ed/pycocotools-2.0.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1c05f91ccc658dfe01325267209c4b435da1722c93eeb5749fabc1d087b6882", size = 485174, upload-time = "2025-12-15T22:31:04.395Z" }, + { url = "https://files.pythonhosted.org/packages/34/70/7a12752784e57d8034a76c245c618a2f88a9d2463862b990f314aea7e5d6/pycocotools-2.0.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18ba75ff58cedb33a85ce2c18f1452f1fe20c9dd59925eec5300b2bf6205dbe1", size = 493172, upload-time = "2025-12-15T22:31:05.504Z" }, + { url = "https://files.pythonhosted.org/packages/5c/fc/d703599ac728209dba08aea8d4bee884d5adabfcd9041abed1658d863747/pycocotools-2.0.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:693417797f0377fd094eb815c0a1e7d1c3c0251b71e3b3779fce3b3cf24793c5", size = 480506, upload-time = "2025-12-15T22:31:06.77Z" }, + { url = "https://files.pythonhosted.org/packages/81/d9/e1cfc320bbb2cd58c3b4398c3821cbe75d93c16ed3135ac9e774a18a02d3/pycocotools-2.0.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b6a07071c441d0f5e480a8f287106191582e40289d4e242dfe684e0c8a751088", size = 497595, upload-time = "2025-12-15T22:31:08.277Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/23/d17f6111c2a6ae8631d4fa90202bea05844da715d61431fbc34d276462d5/pycocotools-2.0.11-cp311-cp311-win_amd64.whl", hash = "sha256:8e159232adae3aef6b4e2d37b008bff107b26e9ed3b48e70ea6482302834bd34", size = 80519, upload-time = "2025-12-15T22:31:09.613Z" }, + { url = "https://files.pythonhosted.org/packages/00/4c/76b00b31a724c3f5ccdab0f85e578afb2ca38d33be0a0e98f1770cafd958/pycocotools-2.0.11-cp311-cp311-win_arm64.whl", hash = "sha256:4fc9889e819452b9c142036e1eabac8a13a8bd552d8beba299a57e0da6bfa1ec", size = 69304, upload-time = "2025-12-15T22:31:10.592Z" }, + { url = "https://files.pythonhosted.org/packages/87/12/2f2292332456e4e4aba1dec0e3de8f1fc40fb2f4fdb0ca1cb17db9861682/pycocotools-2.0.11-cp312-abi3-macosx_10_13_universal2.whl", hash = "sha256:a2e9634bc7cadfb01c88e0b98589aaf0bd12983c7927bde93f19c0103e5441f4", size = 147795, upload-time = "2025-12-15T22:31:11.519Z" }, + { url = "https://files.pythonhosted.org/packages/63/3c/68d7ea376aada9046e7ea2d7d0dad0d27e1ae8b4b3c26a28346689390ab2/pycocotools-2.0.11-cp312-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fd4121766cc057133534679c0ec3f9023dbd96e9b31cf95c86a069ebdac2b65", size = 398434, upload-time = "2025-12-15T22:31:12.558Z" }, + { url = "https://files.pythonhosted.org/packages/23/59/dc81895beff4e1207a829d40d442ea87cefaac9f6499151965f05c479619/pycocotools-2.0.11-cp312-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a82d1c9ed83f75da0b3f244f2a3cf559351a283307bd9b79a4ee2b93ab3231dd", size = 411685, upload-time = "2025-12-15T22:31:13.995Z" }, + { url = "https://files.pythonhosted.org/packages/0b/0b/5a8a7de300862a2eb5e2ecd3cb015126231379206cd3ebba8f025388d770/pycocotools-2.0.11-cp312-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:89e853425018e2c2920ee0f2112cf7c140a1dcf5f4f49abd9c2da112c3e0f4b3", size = 390500, upload-time = "2025-12-15T22:31:15.138Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/b5/519bb68647f06feea03d5f355c33c05800aeae4e57b9482b2859eb00752e/pycocotools-2.0.11-cp312-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:87af87b8d06d5b852a885a319d9362dca3bed9f8bbcc3feb6513acb1f88ea242", size = 409790, upload-time = "2025-12-15T22:31:16.326Z" }, + { url = "https://files.pythonhosted.org/packages/83/b4/f6708404ff494706b80e714b919f76dc4ec9845a4007affd6d6b0843f928/pycocotools-2.0.11-cp312-abi3-win_amd64.whl", hash = "sha256:ffe806ce535f5996445188f9a35643791dc54beabc61bd81e2b03367356d604f", size = 77570, upload-time = "2025-12-15T22:31:17.703Z" }, + { url = "https://files.pythonhosted.org/packages/6e/63/778cd0ddc9d4a78915ac0a72b56d7fb204f7c3fabdad067d67ea0089762e/pycocotools-2.0.11-cp312-abi3-win_arm64.whl", hash = "sha256:c230f5e7b14bd19085217b4f40bba81bf14a182b150b8e9fab1c15d504ade343", size = 64564, upload-time = "2025-12-15T22:31:18.652Z" }, ] [[package]] name = "pycparser" -version = "2.22" +version = "3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, ] [[package]] name = "pycuda" -version = "2025.1.1" +version = "2025.1.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mako" }, { name = "platformdirs" }, { name = "pytools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a4/18/1da9464c86c3e59660d40515a93904f9b32726ee62bddb07491a39a743c5/pycuda-2025.1.1.tar.gz", hash = "sha256:bab0678d43f469c865f5fe6024f481c781ec51fefe6686acd66c672bfabea34f", size = 1689662, upload-time = "2025-06-08T20:27:29.578Z" } +sdist = { url = "https://files.pythonhosted.org/packages/09/07/2b1eea34f1b620db10fe05b50d8d7620e858fe2c42da984f07e49021a1e3/pycuda-2025.1.3.tar.gz", hash = "sha256:ff16d807b4601bb8a5c3adadb6a4726774e5dd8ffdf61c9b23a41858748fd77a", size = 1690651, upload-time = "2026-02-18T17:52:25.127Z" } [[package]] name = "pydantic" -version = "2.11.7" +version = "2.12.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -3154,130 +11113,154 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, 
upload-time = "2025-04-23T18:30:47.591Z" }, - { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = 
"2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = 
"2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, 
upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = 
"https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = 
"https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = 
"https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = 
"2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/90/32c9941e728d564b411d574d8ee0cf09b12ec978cb22b294995bae5549a5/pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146", size = 2107298, upload-time = "2025-11-04T13:39:04.116Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a8/61c96a77fe28993d9a6fb0f4127e05430a267b235a124545d79fea46dd65/pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2", size = 1901475, upload-time = "2025-11-04T13:39:06.055Z" }, + { url = "https://files.pythonhosted.org/packages/5d/b6/338abf60225acc18cdc08b4faef592d0310923d19a87fba1faf05af5346e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97", size = 1918815, upload-time = "2025-11-04T13:39:10.41Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/1c/2ed0433e682983d8e8cba9c8d8ef274d4791ec6a6f24c58935b90e780e0a/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9", size = 2065567, upload-time = "2025-11-04T13:39:12.244Z" }, + { url = "https://files.pythonhosted.org/packages/b3/24/cf84974ee7d6eae06b9e63289b7b8f6549d416b5c199ca2d7ce13bbcf619/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52", size = 2230442, upload-time = "2025-11-04T13:39:13.962Z" }, + { url = "https://files.pythonhosted.org/packages/fd/21/4e287865504b3edc0136c89c9c09431be326168b1eb7841911cbc877a995/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941", size = 2350956, upload-time = "2025-11-04T13:39:15.889Z" }, + { url = "https://files.pythonhosted.org/packages/a8/76/7727ef2ffa4b62fcab916686a68a0426b9b790139720e1934e8ba797e238/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a", size = 2068253, upload-time = "2025-11-04T13:39:17.403Z" }, + { url = "https://files.pythonhosted.org/packages/d5/8c/a4abfc79604bcb4c748e18975c44f94f756f08fb04218d5cb87eb0d3a63e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c", size = 2177050, upload-time = "2025-11-04T13:39:19.351Z" }, + { url = "https://files.pythonhosted.org/packages/67/b1/de2e9a9a79b480f9cb0b6e8b6ba4c50b18d4e89852426364c66aa82bb7b3/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2", size = 2147178, upload-time = 
"2025-11-04T13:39:21Z" }, + { url = "https://files.pythonhosted.org/packages/16/c1/dfb33f837a47b20417500efaa0378adc6635b3c79e8369ff7a03c494b4ac/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556", size = 2341833, upload-time = "2025-11-04T13:39:22.606Z" }, + { url = "https://files.pythonhosted.org/packages/47/36/00f398642a0f4b815a9a558c4f1dca1b4020a7d49562807d7bc9ff279a6c/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49", size = 2321156, upload-time = "2025-11-04T13:39:25.843Z" }, + { url = "https://files.pythonhosted.org/packages/7e/70/cad3acd89fde2010807354d978725ae111ddf6d0ea46d1ea1775b5c1bd0c/pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba", size = 1989378, upload-time = "2025-11-04T13:39:27.92Z" }, + { url = "https://files.pythonhosted.org/packages/76/92/d338652464c6c367e5608e4488201702cd1cbb0f33f7b6a85a60fe5f3720/pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9", size = 2013622, upload-time = "2025-11-04T13:39:29.848Z" }, + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, 
upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { 
url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = 
"2025-11-04T13:42:47.156Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/e6/b0/1a2aa41e3b5a4ba11420aba2d091b2d17959c8d1519ece3627c371951e73/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8", size = 2103351, upload-time = "2025-11-04T13:43:02.058Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ee/31b1f0020baaf6d091c87900ae05c6aeae101fa4e188e1613c80e4f1ea31/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a", size = 1925363, upload-time = "2025-11-04T13:43:05.159Z" }, + { url = "https://files.pythonhosted.org/packages/e1/89/ab8e86208467e467a80deaca4e434adac37b10a9d134cd2f99b28a01e483/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b", size = 2135615, upload-time = "2025-11-04T13:43:08.116Z" }, + { url = "https://files.pythonhosted.org/packages/99/0a/99a53d06dd0348b2008f2f30884b34719c323f16c3be4e6cc1203b74a91d/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2", size = 2175369, upload-time = "2025-11-04T13:43:12.49Z" }, + { url = "https://files.pythonhosted.org/packages/6d/94/30ca3b73c6d485b9bb0bc66e611cff4a7138ff9736b7e66bcf0852151636/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093", size = 2144218, upload-time = "2025-11-04T13:43:15.431Z" }, + { url = "https://files.pythonhosted.org/packages/87/57/31b4f8e12680b739a91f472b5671294236b82586889ef764b5fbc6669238/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a", size = 2329951, upload-time = "2025-11-04T13:43:18.062Z" }, + { url = "https://files.pythonhosted.org/packages/7d/73/3c2c8edef77b8f7310e6fb012dbc4b8551386ed575b9eb6fb2506e28a7eb/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963", size = 2318428, upload-time = "2025-11-04T13:43:20.679Z" }, + { url = "https://files.pythonhosted.org/packages/2f/02/8559b1f26ee0d502c74f9cca5c0d2fd97e967e083e006bbbb4e97f3a043a/pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a", size = 2147009, upload-time = "2025-11-04T13:43:23.286Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = 
"sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, +] + +[[package]] +name = "pydeprecate" +version = "0.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/53/8c/a2b828824b6a5d7826a3af3948b8a312064c28ef2a3312a86b003bbbe21f/pydeprecate-0.5.0.tar.gz", hash = "sha256:d4dad1a44673257385674276e08415da626f002ea12fe73cf1d70d391b63f936", size = 61364, upload-time = "2026-02-23T22:35:49.723Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/40/ff4ca100d6d698ce17cdc82799a4b365c3f73c1746a440bba543a1c8a4db/pydeprecate-0.5.0-py3-none-any.whl", hash = "sha256:1668d1e152f6ec6127a8ce8bd501215a4c3d756e46ced542d0222f1df2c78eca", size = 38022, upload-time = "2026-02-23T22:35:48.512Z" }, ] [[package]] name = "pygments" -version = "2.19.1" +version = "2.20.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = "2025-01-06T17:26:30.443Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" }, + { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" }, ] [[package]] name = "pymdown-extensions" -version = "10.20" +version = "10.21.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown" }, { name = "pyyaml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3e/35/e3814a5b7df295df69d035cfb8aab78b2967cdf11fcfae7faed726b66664/pymdown_extensions-10.20.tar.gz", hash = "sha256:5c73566ab0cf38c6ba084cb7c5ea64a119ae0500cce754ccb682761dfea13a52", size = 852774, upload-time = "2025-12-31T19:59:42.211Z" } +sdist = { url = "https://files.pythonhosted.org/packages/df/08/f1c908c581fd11913da4711ea7ba32c0eee40b0190000996bb863b0c9349/pymdown_extensions-10.21.2.tar.gz", hash = "sha256:c3f55a5b8a1d0edf6699e35dcbea71d978d34ff3fa79f3d807b8a5b3fa90fbdc", size = 853922, upload-time = "2026-03-29T15:01:55.233Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ea/10/47caf89cbb52e5bb764696fd52a8c591a2f0e851a93270c05a17f36000b5/pymdown_extensions-10.20-py3-none-any.whl", hash = "sha256:ea9e62add865da80a271d00bfa1c0fa085b20d133fb3fc97afdc88e682f60b2f", size = 268733, upload-time = "2025-12-31T19:59:40.652Z" }, + { url = 
"https://files.pythonhosted.org/packages/f7/27/a2fc51a4a122dfd1015e921ae9d22fee3d20b0b8080d9a704578bf9deece/pymdown_extensions-10.21.2-py3-none-any.whl", hash = "sha256:5c0fd2a2bea14eb39af8ff284f1066d898ab2187d81b889b75d46d4348c01638", size = 268901, upload-time = "2026-03-29T15:01:53.244Z" }, ] [[package]] name = "pyparsing" -version = "3.2.3" +version = "3.3.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bb/22/f1129e69d94ffff626bdb5c835506b3a5b4f3d070f17ea295e12c2c6f60f/pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be", size = 1088608, upload-time = "2025-03-25T05:01:28.114Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/91/9c6ee907786a473bf81c5f53cf703ba0957b23ab84c264080fb5a450416f/pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc", size = 6851574, upload-time = "2026-01-21T03:57:59.36Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/05/e7/df2285f3d08fee213f2d041540fa4fc9ca6c2d44cf36d3a035bf2a8d2bcc/pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf", size = 111120, upload-time = "2025-03-25T05:01:24.908Z" }, + { url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" }, ] [[package]] name = "pypdfium2" -version = "4.30.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/14/838b3ba247a0ba92e4df5d23f2bea9478edcfd72b78a39d6ca36ccd84ad2/pypdfium2-4.30.0.tar.gz", hash = "sha256:48b5b7e5566665bc1015b9d69c1ebabe21f6aee468b509531c3c8318eeee2e16", size = 140239, upload-time = "2024-05-09T18:33:17.552Z" } -wheels 
= [ - { url = "https://files.pythonhosted.org/packages/c7/9a/c8ff5cc352c1b60b0b97642ae734f51edbab6e28b45b4fcdfe5306ee3c83/pypdfium2-4.30.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:b33ceded0b6ff5b2b93bc1fe0ad4b71aa6b7e7bd5875f1ca0cdfb6ba6ac01aab", size = 2837254, upload-time = "2024-05-09T18:32:48.653Z" }, - { url = "https://files.pythonhosted.org/packages/21/8b/27d4d5409f3c76b985f4ee4afe147b606594411e15ac4dc1c3363c9a9810/pypdfium2-4.30.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:4e55689f4b06e2d2406203e771f78789bd4f190731b5d57383d05cf611d829de", size = 2707624, upload-time = "2024-05-09T18:32:51.458Z" }, - { url = "https://files.pythonhosted.org/packages/11/63/28a73ca17c24b41a205d658e177d68e198d7dde65a8c99c821d231b6ee3d/pypdfium2-4.30.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e6e50f5ce7f65a40a33d7c9edc39f23140c57e37144c2d6d9e9262a2a854854", size = 2793126, upload-time = "2024-05-09T18:32:53.581Z" }, - { url = "https://files.pythonhosted.org/packages/d1/96/53b3ebf0955edbd02ac6da16a818ecc65c939e98fdeb4e0958362bd385c8/pypdfium2-4.30.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3d0dd3ecaffd0b6dbda3da663220e705cb563918249bda26058c6036752ba3a2", size = 2591077, upload-time = "2024-05-09T18:32:55.99Z" }, - { url = "https://files.pythonhosted.org/packages/ec/ee/0394e56e7cab8b5b21f744d988400948ef71a9a892cbeb0b200d324ab2c7/pypdfium2-4.30.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc3bf29b0db8c76cdfaac1ec1cde8edf211a7de7390fbf8934ad2aa9b4d6dfad", size = 2864431, upload-time = "2024-05-09T18:32:57.911Z" }, - { url = "https://files.pythonhosted.org/packages/65/cd/3f1edf20a0ef4a212a5e20a5900e64942c5a374473671ac0780eaa08ea80/pypdfium2-4.30.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1f78d2189e0ddf9ac2b7a9b9bd4f0c66f54d1389ff6c17e9fd9dc034d06eb3f", size = 2812008, upload-time = "2024-05-09T18:32:59.886Z" }, - { url = 
"https://files.pythonhosted.org/packages/c8/91/2d517db61845698f41a2a974de90762e50faeb529201c6b3574935969045/pypdfium2-4.30.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:5eda3641a2da7a7a0b2f4dbd71d706401a656fea521b6b6faa0675b15d31a163", size = 6181543, upload-time = "2024-05-09T18:33:02.597Z" }, - { url = "https://files.pythonhosted.org/packages/ba/c4/ed1315143a7a84b2c7616569dfb472473968d628f17c231c39e29ae9d780/pypdfium2-4.30.0-py3-none-musllinux_1_1_i686.whl", hash = "sha256:0dfa61421b5eb68e1188b0b2231e7ba35735aef2d867d86e48ee6cab6975195e", size = 6175911, upload-time = "2024-05-09T18:33:05.376Z" }, - { url = "https://files.pythonhosted.org/packages/7a/c4/9e62d03f414e0e3051c56d5943c3bf42aa9608ede4e19dc96438364e9e03/pypdfium2-4.30.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:f33bd79e7a09d5f7acca3b0b69ff6c8a488869a7fab48fdf400fec6e20b9c8be", size = 6267430, upload-time = "2024-05-09T18:33:08.067Z" }, - { url = "https://files.pythonhosted.org/packages/90/47/eda4904f715fb98561e34012826e883816945934a851745570521ec89520/pypdfium2-4.30.0-py3-none-win32.whl", hash = "sha256:ee2410f15d576d976c2ab2558c93d392a25fb9f6635e8dd0a8a3a5241b275e0e", size = 2775951, upload-time = "2024-05-09T18:33:10.567Z" }, - { url = "https://files.pythonhosted.org/packages/25/bd/56d9ec6b9f0fc4e0d95288759f3179f0fcd34b1a1526b75673d2f6d5196f/pypdfium2-4.30.0-py3-none-win_amd64.whl", hash = "sha256:90dbb2ac07be53219f56be09961eb95cf2473f834d01a42d901d13ccfad64b4c", size = 2892098, upload-time = "2024-05-09T18:33:13.107Z" }, - { url = "https://files.pythonhosted.org/packages/be/7a/097801205b991bc3115e8af1edb850d30aeaf0118520b016354cf5ccd3f6/pypdfium2-4.30.0-py3-none-win_arm64.whl", hash = "sha256:119b2969a6d6b1e8d55e99caaf05290294f2d0fe49c12a3f17102d01c441bd29", size = 2752118, upload-time = "2024-05-09T18:33:15.489Z" }, +version = "5.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/3b/01/be763b9081c7eb823196e7d13d9c145bf75ac43f3c1466de81c21c24b381/pypdfium2-5.6.0.tar.gz", hash = "sha256:bcb9368acfe3547054698abbdae68ba0cbd2d3bda8e8ee437e061deef061976d", size = 270714, upload-time = "2026-03-08T01:05:06.5Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9d/b1/129ed0177521a93a892f8a6a215dd3260093e30e77ef7035004bb8af7b6c/pypdfium2-5.6.0-py3-none-android_23_arm64_v8a.whl", hash = "sha256:fb7858c9707708555b4a719b5548a6e7f5d26bc82aef55ae4eb085d7a2190b11", size = 3346059, upload-time = "2026-03-08T01:04:21.37Z" }, + { url = "https://files.pythonhosted.org/packages/86/34/cbdece6886012180a7f2c7b2c360c415cf5e1f83f1973d2c9201dae3506a/pypdfium2-5.6.0-py3-none-android_23_armeabi_v7a.whl", hash = "sha256:6a7e1f4597317786f994bfb947eef480e53933f804a990193ab89eef8243f805", size = 2804418, upload-time = "2026-03-08T01:04:23.384Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f6/9f9e190fe0e5a6b86b82f83bd8b5d3490348766062381140ca5cad8e00b1/pypdfium2-5.6.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e468c38997573f0e86f03273c2c1fbdea999de52ba43fee96acaa2f6b2ad35f7", size = 3412541, upload-time = "2026-03-08T01:04:25.45Z" }, + { url = "https://files.pythonhosted.org/packages/ee/8d/e57492cb2228ba56ed57de1ff044c8ac114b46905f8b1445c33299ba0488/pypdfium2-5.6.0-py3-none-macosx_11_0_x86_64.whl", hash = "sha256:ad3abddc5805424f962e383253ccad6a0d1d2ebd86afa9a9e1b9ca659773cd0d", size = 3592320, upload-time = "2026-03-08T01:04:27.509Z" }, + { url = "https://files.pythonhosted.org/packages/f9/8a/8ab82e33e9c551494cbe1526ea250ca8cc4e9e98d6a4fc6b6f8d959aa1d1/pypdfium2-5.6.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6b5eb9eae5c45076395454522ca26add72ba8bd1fe473e1e4721aa58521470c", size = 3596450, upload-time = "2026-03-08T01:04:29.183Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/b5/602a792282312ccb158cc63849528079d94b0a11efdc61f2a359edfb41e9/pypdfium2-5.6.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:258624da8ef45cdc426e11b33e9d83f9fb723c1c201c6e0f4ab5a85966c6b876", size = 3325442, upload-time = "2026-03-08T01:04:30.886Z" }, + { url = "https://files.pythonhosted.org/packages/81/1f/9e48ec05ed8d19d736c2d1f23c1bd0f20673f02ef846a2576c69e237f15d/pypdfium2-5.6.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9367451c8a00931d6612db0822525a18c06f649d562cd323a719e46ac19c9bb", size = 3727434, upload-time = "2026-03-08T01:04:33.619Z" }, + { url = "https://files.pythonhosted.org/packages/33/90/0efd020928b4edbd65f4f3c2af0c84e20b43a3ada8fa6d04f999a97afe7a/pypdfium2-5.6.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a757869f891eac1cc1372e38a4aa01adac8abc8fe2a8a4e2ebf50595e3bf5937", size = 4139029, upload-time = "2026-03-08T01:04:36.08Z" }, + { url = "https://files.pythonhosted.org/packages/ff/49/a640b288a48dab1752281dd9b72c0679fccea107874e80a65a606b00efa9/pypdfium2-5.6.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:515be355222cc57ae9e62cd5c7c350b8e0c863efc539f80c7d75e2811ba45cb6", size = 3646387, upload-time = "2026-03-08T01:04:38.151Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/a344c19c01021eeb5d830c102e4fc9b1602f19c04aa7d11abbe2d188fd8e/pypdfium2-5.6.0-py3-none-manylinux_2_27_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1c4753c7caf7d004211d7f57a21f10d127f5e0e5510a14d24bc073e7220a3ea", size = 3097212, upload-time = "2026-03-08T01:04:40.776Z" }, + { url = "https://files.pythonhosted.org/packages/50/96/e48e13789ace22aeb9b7510904a1b1493ec588196e11bbacc122da330b3d/pypdfium2-5.6.0-py3-none-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c49729090281fdd85775fb8912c10bd19e99178efaa98f145ab06e7ce68554d2", size = 2965026, upload-time = "2026-03-08T01:04:42.857Z" }, + { 
url = "https://files.pythonhosted.org/packages/cb/06/3100e44d4935f73af8f5d633d3bd40f0d36d606027085a0ef1f0566a6320/pypdfium2-5.6.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a4a1749a8d4afd62924a8d95cfa4f2e26fc32957ce34ac3b674be6f127ed252e", size = 4131431, upload-time = "2026-03-08T01:04:44.982Z" }, + { url = "https://files.pythonhosted.org/packages/64/ef/d8df63569ce9a66c8496057782eb8af78e0d28667922d62ec958434e3d4b/pypdfium2-5.6.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:36469ebd0fdffb7130ce45ed9c44f8232d91571c89eb851bd1633c64b6f6114f", size = 3747469, upload-time = "2026-03-08T01:04:46.702Z" }, + { url = "https://files.pythonhosted.org/packages/a6/47/fd2c6a67a49fade1acd719fbd11f7c375e7219912923ef2de0ea0ac1544e/pypdfium2-5.6.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9da900df09be3cf546b637a127a7b6428fb22d705951d731269e25fd3adef457", size = 4337578, upload-time = "2026-03-08T01:04:49.007Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f5/836c83e54b01e09478c4d6bf4912651d6053c932250fcee953f5c72d8e4a/pypdfium2-5.6.0-py3-none-musllinux_1_2_ppc64le.whl", hash = "sha256:45fccd5622233c5ec91a885770ae7dd4004d4320ac05a4ad8fa03a66dea40244", size = 4376104, upload-time = "2026-03-08T01:04:51.04Z" }, + { url = "https://files.pythonhosted.org/packages/6e/7f/b940b6a1664daf8f9bad87c6c99b84effa3611615b8708d10392dc33036c/pypdfium2-5.6.0-py3-none-musllinux_1_2_riscv64.whl", hash = "sha256:282dc030e767cd61bd0299f9d581052b91188e2b87561489057a8e7963e7e0cb", size = 3929824, upload-time = "2026-03-08T01:04:53.544Z" }, + { url = "https://files.pythonhosted.org/packages/88/79/00267d92a6a58c229e364d474f5698efe446e0c7f4f152f58d0138715e99/pypdfium2-5.6.0-py3-none-musllinux_1_2_s390x.whl", hash = "sha256:a1c1dfe950382c76a7bba1ba160ec5e40df8dd26b04a1124ae268fda55bc4cbe", size = 4270201, upload-time = "2026-03-08T01:04:55.81Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/ab/b127f38aba41746bdf9ace15ba08411d7ef6ecba1326d529ba414eb1ed50/pypdfium2-5.6.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:43b0341ca6feb6c92e4b7a9eb4813e5466f5f5e8b6baeb14df0a94d5f312c00b", size = 4180793, upload-time = "2026-03-08T01:04:57.961Z" }, + { url = "https://files.pythonhosted.org/packages/0e/8c/a01c8e4302448b614d25a85c08298b0d3e9dfbdac5bd1b2f32c9b02e83d9/pypdfium2-5.6.0-py3-none-win32.whl", hash = "sha256:9dfcd4ff49a2b9260d00e38539ab28190d59e785e83030b30ffaf7a29c42155d", size = 3596753, upload-time = "2026-03-08T01:05:00.566Z" }, + { url = "https://files.pythonhosted.org/packages/9b/5f/2d871adf46761bb002a62686545da6348afe838d19af03df65d1ece786a2/pypdfium2-5.6.0-py3-none-win_amd64.whl", hash = "sha256:c6bc8dd63d0568f4b592f3e03de756afafc0e44aa1fe8878cc4aba1b11ae7374", size = 3716526, upload-time = "2026-03-08T01:05:02.433Z" }, + { url = "https://files.pythonhosted.org/packages/3a/80/0d9b162098597fbe3ac2b269b1682c0c3e8db9ba87679603fdd9b19afaa6/pypdfium2-5.6.0-py3-none-win_arm64.whl", hash = "sha256:5538417b199bdcb3207370c88df61f2ba3dac7a3253f82e1aa2708e6376b6f90", size = 3515049, upload-time = "2026-03-08T01:05:04.587Z" }, ] [[package]] @@ -3291,20 +11274,20 @@ wheels = [ [[package]] name = "pytest" -version = "8.4.1" +version = "9.0.3" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "exceptiongroup", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "colorama", marker = "sys_platform == 'win32' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "iniconfig" }, { name = "packaging" }, { name = "pluggy" }, { name = "pygments" }, - { name = "tomli", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "tomli", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714, upload-time = "2025-06-18T05:48:06.109Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/0d/549bd94f1a0a402dc8cf64563a117c0f3765662e2e668477624baeec44d5/pytest-9.0.3.tar.gz", hash = "sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c", size = 1572165, 
upload-time = "2026-04-07T17:16:18.027Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" }, + { url = "https://files.pythonhosted.org/packages/d4/24/a372aaf5c9b7208e7112038812994107bc65a84cd00e0354a88c2c77a617/pytest-9.0.3-py3-none-any.whl", hash = "sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9", size = 375249, upload-time = "2026-04-07T17:16:16.13Z" }, ] [[package]] @@ -3416,30 +11399,33 @@ dependencies = [ { name = "h5py" }, { name = "huggingface-hub" }, { name = "langdetect" }, - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra 
== 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra 
== 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or 
(python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "onnx" }, - { name = "opencv-python" }, + { name = "opencv-python", version = "4.11.0.86", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "opencv-python", version = "4.13.0.92", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-falcon-perception' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' 
and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126')" }, { name = "pillow" }, { name = "pyclipper" }, { name = "pypdfium2" }, { name = "rapidfuzz" }, - { name = "scipy" }, + { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version >= '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "shapely" }, - { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
== 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.21.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1", source = { registry = 
"https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and 
extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or 
(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or 
(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or 
(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or 
(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and 
extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.23.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.23.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { 
name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra 
== 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.21.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.22.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and 
extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, 
{ name = "tqdm" }, { name = "validators" }, ] @@ -3450,16 +11436,25 @@ wheels = [ [[package]] name = "pytools" -version = "2025.1.6" +version = "2025.2.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "platformdirs" }, { name = "siphash24" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4e/74/e99132a545c6f0bb95a3c271b96e5eb7b6b282aede787e738bbe7f4f3879/pytools-2025.1.6.tar.gz", hash = "sha256:938e1df9997ba5ac771034a49b0e63801bd93a2b92e6a38f43241519a1fb322b", size = 96212, upload-time = "2025-05-27T20:15:38.697Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/7b/f885a57e61ded45b5b10ca60f0b7575c9fb9a282e7513d0e23a33ee647e1/pytools-2025.2.5.tar.gz", hash = "sha256:a7f5350644d46d98ee9c7e67b4b41693308aa0f5e9b188d8f0694b27dc94e3a2", size = 85594, upload-time = "2025-10-07T15:53:30.49Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/84/c42c29ca4bff35baa286df70b0097e0b1c88fd57e8e6bdb09cb161a6f3c1/pytools-2025.2.5-py3-none-any.whl", hash = "sha256:42e93751ec425781e103bbcd769ba35ecbacd43339c2905401608f2fdc30cf19", size = 98811, upload-time = "2025-10-07T15:53:29.089Z" }, +] + +[[package]] +name = "pytz" +version = "2026.1.post1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/56/db/b8721d71d945e6a8ac63c0fc900b2067181dbb50805958d4d4661cf7d277/pytz-2026.1.post1.tar.gz", hash = "sha256:3378dde6a0c3d26719182142c56e60c7f9af7e968076f31aae569d72a0358ee1", size = 321088, upload-time = "2026-03-03T07:47:50.683Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/a3/b3ce189e57f108fe1242656d8efec264b11c6b8a612963cce23590a18f59/pytools-2025.1.6-py3-none-any.whl", hash = "sha256:c7652cb6faf015e20e452f744bf7c12b207dd2e88973b8a79a4365a3d510f669", size = 95995, upload-time = "2025-05-27T20:15:37.251Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl", hash = "sha256:f2fd16142fda348286a75e1a524be810bb05d444e5a081f37f7affc635035f7a", size = 510489, upload-time = "2026-03-03T07:47:49.167Z" }, ] [[package]] @@ -3468,7 +11463,6 @@ version = "2.2.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi" }, - { name = "pkgconfig" }, ] sdist = { url = "https://files.pythonhosted.org/packages/7b/88/f73dae807ec68b228fba72507105e3ba80a561dc0bade0004ce24fd118fc/pyvips-2.2.3.tar.gz", hash = "sha256:43bceced0db492654c93008246a58a508e0373ae1621116b87b322f2ac72212f", size = 56626, upload-time = "2024-04-28T11:19:58.158Z" } @@ -3490,37 +11484,38 @@ wheels = [ [[package]] name = "pyyaml" -version = "6.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" }, - { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" }, - { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" }, - { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" }, - { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" }, - { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" }, - { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" }, - { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" }, - { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = 
"2024-08-06T20:31:58.304Z" }, - { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, - { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, - { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, - { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, - { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, - { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, - { url = 
"https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, - { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, - { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, - { url = 
"https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, - { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b", size = 184227, upload-time = "2025-09-25T21:31:46.04Z" }, + { url = "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956", size = 174019, upload-time = "2025-09-25T21:31:47.706Z" }, + { url = "https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8", size = 740646, upload-time = "2025-09-25T21:31:49.21Z" }, + { url = "https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198", size = 840793, upload-time = "2025-09-25T21:31:50.735Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b", size = 770293, upload-time = "2025-09-25T21:31:51.828Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0", size = 732872, upload-time = "2025-09-25T21:31:53.282Z" }, + { url = "https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69", size = 758828, upload-time = "2025-09-25T21:31:54.807Z" }, + { url = "https://files.pythonhosted.org/packages/a3/00/531e92e88c00f4333ce359e50c19b8d1de9fe8d581b1534e35ccfbc5f393/pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e", size = 142415, upload-time = "2025-09-25T21:31:55.885Z" }, + { url = "https://files.pythonhosted.org/packages/2a/fa/926c003379b19fca39dd4634818b00dec6c62d87faf628d1394e137354d4/pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c", size = 158561, upload-time = "2025-09-25T21:31:57.406Z" }, + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, ] [[package]] @@ -3540,7 +11535,7 @@ name = "pyzmq" version = "27.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cffi", marker = "implementation_name == 'pypy' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "cffi", marker = "implementation_name == 'pypy' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/04/0b/3c9baedbdf613ecaa7aa07027780b8867f57b6293b6ee50de316c9f3222b/pyzmq-27.1.0.tar.gz", hash = "sha256:ac0765e3d44455adb6ddbf4417dcce460fc40a05978c08efdf2948072f6db540", size = 281750, upload-time = "2025-09-08T23:10:18.157Z" } wheels = [ @@ -3588,67 +11583,48 @@ wheels = [ [[package]] name = "rapidfuzz" -version = "3.13.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ed/f6/6895abc3a3d056b9698da3199b04c0e56226d530ae44a470edabf8b664f0/rapidfuzz-3.13.0.tar.gz", hash = "sha256:d2eaf3839e52cbcc0accbe9817a67b4b0fcf70aaeb229cfddc1c28061f9ce5d8", size = 57904226, upload-time = "2025-04-03T20:38:51.226Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/de/27/ca10b3166024ae19a7e7c21f73c58dfd4b7fef7420e5497ee64ce6b73453/rapidfuzz-3.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aafc42a1dc5e1beeba52cd83baa41372228d6d8266f6d803c16dbabbcc156255", size = 1998899, upload-time = "2025-04-03T20:35:08.764Z" }, - { url = "https://files.pythonhosted.org/packages/f0/38/c4c404b13af0315483a6909b3a29636e18e1359307fb74a333fdccb3730d/rapidfuzz-3.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:85c9a131a44a95f9cac2eb6e65531db014e09d89c4f18c7b1fa54979cb9ff1f3", size = 1449949, upload-time = "2025-04-03T20:35:11.26Z" }, - { url = "https://files.pythonhosted.org/packages/12/ae/15c71d68a6df6b8e24595421fdf5bcb305888318e870b7be8d935a9187ee/rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d7cec4242d30dd521ef91c0df872e14449d1dffc2a6990ede33943b0dae56c3", size = 1424199, upload-time = 
"2025-04-03T20:35:12.954Z" }, - { url = "https://files.pythonhosted.org/packages/dc/9a/765beb9e14d7b30d12e2d6019e8b93747a0bedbc1d0cce13184fa3825426/rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e297c09972698c95649e89121e3550cee761ca3640cd005e24aaa2619175464e", size = 5352400, upload-time = "2025-04-03T20:35:15.421Z" }, - { url = "https://files.pythonhosted.org/packages/e2/b8/49479fe6f06b06cd54d6345ed16de3d1ac659b57730bdbe897df1e059471/rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ef0f5f03f61b0e5a57b1df7beafd83df993fd5811a09871bad6038d08e526d0d", size = 1652465, upload-time = "2025-04-03T20:35:18.43Z" }, - { url = "https://files.pythonhosted.org/packages/6f/d8/08823d496b7dd142a7b5d2da04337df6673a14677cfdb72f2604c64ead69/rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d8cf5f7cd6e4d5eb272baf6a54e182b2c237548d048e2882258336533f3f02b7", size = 1616590, upload-time = "2025-04-03T20:35:20.482Z" }, - { url = "https://files.pythonhosted.org/packages/38/d4/5cfbc9a997e544f07f301c54d42aac9e0d28d457d543169e4ec859b8ce0d/rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9256218ac8f1a957806ec2fb9a6ddfc6c32ea937c0429e88cf16362a20ed8602", size = 3086956, upload-time = "2025-04-03T20:35:22.756Z" }, - { url = "https://files.pythonhosted.org/packages/25/1e/06d8932a72fa9576095234a15785136407acf8f9a7dbc8136389a3429da1/rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1bdd2e6d0c5f9706ef7595773a81ca2b40f3b33fd7f9840b726fb00c6c4eb2e", size = 2494220, upload-time = "2025-04-03T20:35:25.563Z" }, - { url = "https://files.pythonhosted.org/packages/03/16/5acf15df63119d5ca3d9a54b82807866ff403461811d077201ca351a40c3/rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5280be8fd7e2bee5822e254fe0a5763aa0ad57054b85a32a3d9970e9b09bbcbf", size = 7585481, upload-time = 
"2025-04-03T20:35:27.426Z" }, - { url = "https://files.pythonhosted.org/packages/e1/cf/ebade4009431ea8e715e59e882477a970834ddaacd1a670095705b86bd0d/rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fd742c03885db1fce798a1cd87a20f47f144ccf26d75d52feb6f2bae3d57af05", size = 2894842, upload-time = "2025-04-03T20:35:29.457Z" }, - { url = "https://files.pythonhosted.org/packages/a7/bd/0732632bd3f906bf613229ee1b7cbfba77515db714a0e307becfa8a970ae/rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:5435fcac94c9ecf0504bf88a8a60c55482c32e18e108d6079a0089c47f3f8cf6", size = 3438517, upload-time = "2025-04-03T20:35:31.381Z" }, - { url = "https://files.pythonhosted.org/packages/83/89/d3bd47ec9f4b0890f62aea143a1e35f78f3d8329b93d9495b4fa8a3cbfc3/rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:93a755266856599be4ab6346273f192acde3102d7aa0735e2f48b456397a041f", size = 4412773, upload-time = "2025-04-03T20:35:33.425Z" }, - { url = "https://files.pythonhosted.org/packages/b3/57/1a152a07883e672fc117c7f553f5b933f6e43c431ac3fd0e8dae5008f481/rapidfuzz-3.13.0-cp310-cp310-win32.whl", hash = "sha256:3abe6a4e8eb4cfc4cda04dd650a2dc6d2934cbdeda5def7e6fd1c20f6e7d2a0b", size = 1842334, upload-time = "2025-04-03T20:35:35.648Z" }, - { url = "https://files.pythonhosted.org/packages/a7/68/7248addf95b6ca51fc9d955161072285da3059dd1472b0de773cff910963/rapidfuzz-3.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:e8ddb58961401da7d6f55f185512c0d6bd24f529a637078d41dd8ffa5a49c107", size = 1624392, upload-time = "2025-04-03T20:35:37.294Z" }, - { url = "https://files.pythonhosted.org/packages/68/23/f41c749f2c61ed1ed5575eaf9e73ef9406bfedbf20a3ffa438d15b5bf87e/rapidfuzz-3.13.0-cp310-cp310-win_arm64.whl", hash = "sha256:c523620d14ebd03a8d473c89e05fa1ae152821920c3ff78b839218ff69e19ca3", size = 865584, upload-time = "2025-04-03T20:35:39.005Z" }, - { url = 
"https://files.pythonhosted.org/packages/87/17/9be9eff5a3c7dfc831c2511262082c6786dca2ce21aa8194eef1cb71d67a/rapidfuzz-3.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d395a5cad0c09c7f096433e5fd4224d83b53298d53499945a9b0e5a971a84f3a", size = 1999453, upload-time = "2025-04-03T20:35:40.804Z" }, - { url = "https://files.pythonhosted.org/packages/75/67/62e57896ecbabe363f027d24cc769d55dd49019e576533ec10e492fcd8a2/rapidfuzz-3.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b7b3eda607a019169f7187328a8d1648fb9a90265087f6903d7ee3a8eee01805", size = 1450881, upload-time = "2025-04-03T20:35:42.734Z" }, - { url = "https://files.pythonhosted.org/packages/96/5c/691c5304857f3476a7b3df99e91efc32428cbe7d25d234e967cc08346c13/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98e0bfa602e1942d542de077baf15d658bd9d5dcfe9b762aff791724c1c38b70", size = 1422990, upload-time = "2025-04-03T20:35:45.158Z" }, - { url = "https://files.pythonhosted.org/packages/46/81/7a7e78f977496ee2d613154b86b203d373376bcaae5de7bde92f3ad5a192/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bef86df6d59667d9655905b02770a0c776d2853971c0773767d5ef8077acd624", size = 5342309, upload-time = "2025-04-03T20:35:46.952Z" }, - { url = "https://files.pythonhosted.org/packages/51/44/12fdd12a76b190fe94bf38d252bb28ddf0ab7a366b943e792803502901a2/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fedd316c165beed6307bf754dee54d3faca2c47e1f3bcbd67595001dfa11e969", size = 1656881, upload-time = "2025-04-03T20:35:49.954Z" }, - { url = "https://files.pythonhosted.org/packages/27/ae/0d933e660c06fcfb087a0d2492f98322f9348a28b2cc3791a5dbadf6e6fb/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5158da7f2ec02a930be13bac53bb5903527c073c90ee37804090614cab83c29e", size = 1608494, upload-time = "2025-04-03T20:35:51.646Z" }, - { url = 
"https://files.pythonhosted.org/packages/3d/2c/4b2f8aafdf9400e5599b6ed2f14bc26ca75f5a923571926ccbc998d4246a/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b6f913ee4618ddb6d6f3e387b76e8ec2fc5efee313a128809fbd44e65c2bbb2", size = 3072160, upload-time = "2025-04-03T20:35:53.472Z" }, - { url = "https://files.pythonhosted.org/packages/60/7d/030d68d9a653c301114101c3003b31ce01cf2c3224034cd26105224cd249/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d25fdbce6459ccbbbf23b4b044f56fbd1158b97ac50994eaae2a1c0baae78301", size = 2491549, upload-time = "2025-04-03T20:35:55.391Z" }, - { url = "https://files.pythonhosted.org/packages/8e/cd/7040ba538fc6a8ddc8816a05ecf46af9988b46c148ddd7f74fb0fb73d012/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:25343ccc589a4579fbde832e6a1e27258bfdd7f2eb0f28cb836d6694ab8591fc", size = 7584142, upload-time = "2025-04-03T20:35:57.71Z" }, - { url = "https://files.pythonhosted.org/packages/c1/96/85f7536fbceb0aa92c04a1c37a3fc4fcd4e80649e9ed0fb585382df82edc/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a9ad1f37894e3ffb76bbab76256e8a8b789657183870be11aa64e306bb5228fd", size = 2896234, upload-time = "2025-04-03T20:35:59.969Z" }, - { url = "https://files.pythonhosted.org/packages/55/fd/460e78438e7019f2462fe9d4ecc880577ba340df7974c8a4cfe8d8d029df/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5dc71ef23845bb6b62d194c39a97bb30ff171389c9812d83030c1199f319098c", size = 3437420, upload-time = "2025-04-03T20:36:01.91Z" }, - { url = "https://files.pythonhosted.org/packages/cc/df/c3c308a106a0993befd140a414c5ea78789d201cf1dfffb8fd9749718d4f/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b7f4c65facdb94f44be759bbd9b6dda1fa54d0d6169cdf1a209a5ab97d311a75", size = 4410860, upload-time = "2025-04-03T20:36:04.352Z" }, - { url = 
"https://files.pythonhosted.org/packages/75/ee/9d4ece247f9b26936cdeaae600e494af587ce9bf8ddc47d88435f05cfd05/rapidfuzz-3.13.0-cp311-cp311-win32.whl", hash = "sha256:b5104b62711565e0ff6deab2a8f5dbf1fbe333c5155abe26d2cfd6f1849b6c87", size = 1843161, upload-time = "2025-04-03T20:36:06.802Z" }, - { url = "https://files.pythonhosted.org/packages/c9/5a/d00e1f63564050a20279015acb29ecaf41646adfacc6ce2e1e450f7f2633/rapidfuzz-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:9093cdeb926deb32a4887ebe6910f57fbcdbc9fbfa52252c10b56ef2efb0289f", size = 1629962, upload-time = "2025-04-03T20:36:09.133Z" }, - { url = "https://files.pythonhosted.org/packages/3b/74/0a3de18bc2576b794f41ccd07720b623e840fda219ab57091897f2320fdd/rapidfuzz-3.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:f70f646751b6aa9d05be1fb40372f006cc89d6aad54e9d79ae97bd1f5fce5203", size = 866631, upload-time = "2025-04-03T20:36:11.022Z" }, - { url = "https://files.pythonhosted.org/packages/13/4b/a326f57a4efed8f5505b25102797a58e37ee11d94afd9d9422cb7c76117e/rapidfuzz-3.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a1a6a906ba62f2556372282b1ef37b26bca67e3d2ea957277cfcefc6275cca7", size = 1989501, upload-time = "2025-04-03T20:36:13.43Z" }, - { url = "https://files.pythonhosted.org/packages/b7/53/1f7eb7ee83a06c400089ec7cb841cbd581c2edd7a4b21eb2f31030b88daa/rapidfuzz-3.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fd0975e015b05c79a97f38883a11236f5a24cca83aa992bd2558ceaa5652b26", size = 1445379, upload-time = "2025-04-03T20:36:16.439Z" }, - { url = "https://files.pythonhosted.org/packages/07/09/de8069a4599cc8e6d194e5fa1782c561151dea7d5e2741767137e2a8c1f0/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d4e13593d298c50c4f94ce453f757b4b398af3fa0fd2fde693c3e51195b7f69", size = 1405986, upload-time = "2025-04-03T20:36:18.447Z" }, - { url = 
"https://files.pythonhosted.org/packages/5d/77/d9a90b39c16eca20d70fec4ca377fbe9ea4c0d358c6e4736ab0e0e78aaf6/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed6f416bda1c9133000009d84d9409823eb2358df0950231cc936e4bf784eb97", size = 5310809, upload-time = "2025-04-03T20:36:20.324Z" }, - { url = "https://files.pythonhosted.org/packages/1e/7d/14da291b0d0f22262d19522afaf63bccf39fc027c981233fb2137a57b71f/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dc82b6ed01acb536b94a43996a94471a218f4d89f3fdd9185ab496de4b2a981", size = 1629394, upload-time = "2025-04-03T20:36:22.256Z" }, - { url = "https://files.pythonhosted.org/packages/b7/e4/79ed7e4fa58f37c0f8b7c0a62361f7089b221fe85738ae2dbcfb815e985a/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9d824de871daa6e443b39ff495a884931970d567eb0dfa213d234337343835f", size = 1600544, upload-time = "2025-04-03T20:36:24.207Z" }, - { url = "https://files.pythonhosted.org/packages/4e/20/e62b4d13ba851b0f36370060025de50a264d625f6b4c32899085ed51f980/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d18228a2390375cf45726ce1af9d36ff3dc1f11dce9775eae1f1b13ac6ec50f", size = 3052796, upload-time = "2025-04-03T20:36:26.279Z" }, - { url = "https://files.pythonhosted.org/packages/cd/8d/55fdf4387dec10aa177fe3df8dbb0d5022224d95f48664a21d6b62a5299d/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5fe634c9482ec5d4a6692afb8c45d370ae86755e5f57aa6c50bfe4ca2bdd87", size = 2464016, upload-time = "2025-04-03T20:36:28.525Z" }, - { url = "https://files.pythonhosted.org/packages/9b/be/0872f6a56c0f473165d3b47d4170fa75263dc5f46985755aa9bf2bbcdea1/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:694eb531889f71022b2be86f625a4209c4049e74be9ca836919b9e395d5e33b3", size = 7556725, upload-time = "2025-04-03T20:36:30.629Z" }, - { url = 
"https://files.pythonhosted.org/packages/5d/f3/6c0750e484d885a14840c7a150926f425d524982aca989cdda0bb3bdfa57/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:11b47b40650e06147dee5e51a9c9ad73bb7b86968b6f7d30e503b9f8dd1292db", size = 2859052, upload-time = "2025-04-03T20:36:32.836Z" }, - { url = "https://files.pythonhosted.org/packages/6f/98/5a3a14701b5eb330f444f7883c9840b43fb29c575e292e09c90a270a6e07/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:98b8107ff14f5af0243f27d236bcc6e1ef8e7e3b3c25df114e91e3a99572da73", size = 3390219, upload-time = "2025-04-03T20:36:35.062Z" }, - { url = "https://files.pythonhosted.org/packages/e9/7d/f4642eaaeb474b19974332f2a58471803448be843033e5740965775760a5/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b836f486dba0aceb2551e838ff3f514a38ee72b015364f739e526d720fdb823a", size = 4377924, upload-time = "2025-04-03T20:36:37.363Z" }, - { url = "https://files.pythonhosted.org/packages/8e/83/fa33f61796731891c3e045d0cbca4436a5c436a170e7f04d42c2423652c3/rapidfuzz-3.13.0-cp312-cp312-win32.whl", hash = "sha256:4671ee300d1818d7bdfd8fa0608580d7778ba701817216f0c17fb29e6b972514", size = 1823915, upload-time = "2025-04-03T20:36:39.451Z" }, - { url = "https://files.pythonhosted.org/packages/03/25/5ee7ab6841ca668567d0897905eebc79c76f6297b73bf05957be887e9c74/rapidfuzz-3.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e2065f68fb1d0bf65adc289c1bdc45ba7e464e406b319d67bb54441a1b9da9e", size = 1616985, upload-time = "2025-04-03T20:36:41.631Z" }, - { url = "https://files.pythonhosted.org/packages/76/5e/3f0fb88db396cb692aefd631e4805854e02120a2382723b90dcae720bcc6/rapidfuzz-3.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:65cc97c2fc2c2fe23586599686f3b1ceeedeca8e598cfcc1b7e56dc8ca7e2aa7", size = 860116, upload-time = "2025-04-03T20:36:43.915Z" }, - { url = 
"https://files.pythonhosted.org/packages/d5/e1/f5d85ae3c53df6f817ca70dbdd37c83f31e64caced5bb867bec6b43d1fdf/rapidfuzz-3.13.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fe5790a36d33a5d0a6a1f802aa42ecae282bf29ac6f7506d8e12510847b82a45", size = 1904437, upload-time = "2025-04-03T20:38:00.255Z" }, - { url = "https://files.pythonhosted.org/packages/db/d7/ded50603dddc5eb182b7ce547a523ab67b3bf42b89736f93a230a398a445/rapidfuzz-3.13.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:cdb33ee9f8a8e4742c6b268fa6bd739024f34651a06b26913381b1413ebe7590", size = 1383126, upload-time = "2025-04-03T20:38:02.676Z" }, - { url = "https://files.pythonhosted.org/packages/c4/48/6f795e793babb0120b63a165496d64f989b9438efbeed3357d9a226ce575/rapidfuzz-3.13.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c99b76b93f7b495eee7dcb0d6a38fb3ce91e72e99d9f78faa5664a881cb2b7d", size = 1365565, upload-time = "2025-04-03T20:38:06.646Z" }, - { url = "https://files.pythonhosted.org/packages/f0/50/0062a959a2d72ed17815824e40e2eefdb26f6c51d627389514510a7875f3/rapidfuzz-3.13.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6af42f2ede8b596a6aaf6d49fdee3066ca578f4856b85ab5c1e2145de367a12d", size = 5251719, upload-time = "2025-04-03T20:38:09.191Z" }, - { url = "https://files.pythonhosted.org/packages/e7/02/bd8b70cd98b7a88e1621264778ac830c9daa7745cd63e838bd773b1aeebd/rapidfuzz-3.13.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c0efa73afbc5b265aca0d8a467ae2a3f40d6854cbe1481cb442a62b7bf23c99", size = 2991095, upload-time = "2025-04-03T20:38:12.554Z" }, - { url = "https://files.pythonhosted.org/packages/9f/8d/632d895cdae8356826184864d74a5f487d40cb79f50a9137510524a1ba86/rapidfuzz-3.13.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7ac21489de962a4e2fc1e8f0b0da4aa1adc6ab9512fd845563fecb4b4c52093a", size = 1553888, upload-time = "2025-04-03T20:38:15.357Z" }, - { url = 
"https://files.pythonhosted.org/packages/88/df/6060c5a9c879b302bd47a73fc012d0db37abf6544c57591bcbc3459673bd/rapidfuzz-3.13.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1ba007f4d35a45ee68656b2eb83b8715e11d0f90e5b9f02d615a8a321ff00c27", size = 1905935, upload-time = "2025-04-03T20:38:18.07Z" }, - { url = "https://files.pythonhosted.org/packages/a2/6c/a0b819b829e20525ef1bd58fc776fb8d07a0c38d819e63ba2b7c311a2ed4/rapidfuzz-3.13.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d7a217310429b43be95b3b8ad7f8fc41aba341109dc91e978cd7c703f928c58f", size = 1383714, upload-time = "2025-04-03T20:38:20.628Z" }, - { url = "https://files.pythonhosted.org/packages/6a/c1/3da3466cc8a9bfb9cd345ad221fac311143b6a9664b5af4adb95b5e6ce01/rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:558bf526bcd777de32b7885790a95a9548ffdcce68f704a81207be4a286c1095", size = 1367329, upload-time = "2025-04-03T20:38:23.01Z" }, - { url = "https://files.pythonhosted.org/packages/da/f0/9f2a9043bfc4e66da256b15d728c5fc2d865edf0028824337f5edac36783/rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:202a87760f5145140d56153b193a797ae9338f7939eb16652dd7ff96f8faf64c", size = 5251057, upload-time = "2025-04-03T20:38:25.52Z" }, - { url = "https://files.pythonhosted.org/packages/6a/ff/af2cb1d8acf9777d52487af5c6b34ce9d13381a753f991d95ecaca813407/rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfcccc08f671646ccb1e413c773bb92e7bba789e3a1796fd49d23c12539fe2e4", size = 2992401, upload-time = "2025-04-03T20:38:28.196Z" }, - { url = "https://files.pythonhosted.org/packages/c1/c5/c243b05a15a27b946180db0d1e4c999bef3f4221505dff9748f1f6c917be/rapidfuzz-3.13.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f219f1e3c3194d7a7de222f54450ce12bc907862ff9a8962d83061c1f923c86", size = 1553782, upload-time = "2025-04-03T20:38:30.778Z" }, +version = "3.14.5" 
+source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/21/ef6157213316e85790041254259907eb722e00b03480256c0545d98acd33/rapidfuzz-3.14.5.tar.gz", hash = "sha256:ba10ac57884ce82112f7ed910b67e7fb6072d8ef2c06e30dc63c0f604a112e0e", size = 57901753, upload-time = "2026-04-07T11:16:31.931Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4f/b1/d6d6e7737fe3d0eb2ac2ac337686420d538f83f28495acc3cc32201c0dbf/rapidfuzz-3.14.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:071d96b957a33b9296b9284b6350a0fb6d030b154a04efd7c15e56b98b79a517", size = 1953508, upload-time = "2026-04-07T11:13:37.733Z" }, + { url = "https://files.pythonhosted.org/packages/2b/7b/94c1c953ac818bdd88b43213a9d38e4a41e953b786af3c3b2444d4a8f96d/rapidfuzz-3.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:667f40fe9c81ad129b198d236881b00dd9e8314d9cc72d03c3e16bdfe5879051", size = 1160895, upload-time = "2026-04-07T11:13:39.278Z" }, + { url = "https://files.pythonhosted.org/packages/7f/60/a67a7ca7c2532c6c1a4b5cd797917780eed43798b82c98b6df734a086c95/rapidfuzz-3.14.5-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9fff308486bbd2c8c24f25e8e152c7594d3fe8db265a2d6a1ce24d58671127f", size = 1382245, upload-time = "2026-04-07T11:13:41.054Z" }, + { url = "https://files.pythonhosted.org/packages/95/ff/a42c9ce9f9e90ceb5b51136e0b8e8e6e5113ba0b45d986effbd671e7dddf/rapidfuzz-3.14.5-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dfa552338f51aec280f17b02d28bace1e162d1a84ccd80e3339a57f98aedb56b", size = 3163974, upload-time = "2026-04-07T11:13:42.662Z" }, + { url = "https://files.pythonhosted.org/packages/e3/3c/11e2d41075e6e48b7dad373631b379b7e40491f71d5412c5a98d3c58f60f/rapidfuzz-3.14.5-cp310-cp310-manylinux_2_39_riscv64.whl", hash = "sha256:068b3e965ca9d9ee4debe40001ae7c3938ba646308afd33cf0c66618147db65c", size = 1475540, upload-time = "2026-04-07T11:13:44.687Z" }, + { url = 
"https://files.pythonhosted.org/packages/29/fa/09be143dcc22c79f09cf90168a574725dbda49f02cbbd55d0447da8bec86/rapidfuzz-3.14.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:88b7d31ff1cc5e9bc0e4406e6b1fa00b6d37163d50bb58091e9b976ff1129faa", size = 2404128, upload-time = "2026-04-07T11:13:46.641Z" }, + { url = "https://files.pythonhosted.org/packages/32/f9/1aeb504cdcfde42881825e9c86f48238d4e01ba8a1530491e82eb17e5689/rapidfuzz-3.14.5-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:eacb434410b8d9ca99a8d42352ef085cf423e3c76c1f0b86be2fcba3bff2952c", size = 2508455, upload-time = "2026-04-07T11:13:48.726Z" }, + { url = "https://files.pythonhosted.org/packages/10/8e/b1b5eed8d887a29b0e18fd3222c46ca60fddfb528e7e1c41267ce42d5522/rapidfuzz-3.14.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:649712823f3abcdc48427147a5384fac15623ba435d0013959b52e6462521397", size = 4274060, upload-time = "2026-04-07T11:13:50.805Z" }, + { url = "https://files.pythonhosted.org/packages/e3/c4/7e5b0353693d4f47b8b0f96e941efc377cfb2034b67ef92d082ac4441a0f/rapidfuzz-3.14.5-cp310-cp310-win32.whl", hash = "sha256:13cb79c23ef5516e4c4e3830877be8b19aa75203636be1163d690d37803f6504", size = 1727457, upload-time = "2026-04-07T11:13:52.45Z" }, + { url = "https://files.pythonhosted.org/packages/d9/6e/f530a39b946fa71c009bc9c81fdb6b48a77bbc57ee8572ac0302b3bf6308/rapidfuzz-3.14.5-cp310-cp310-win_amd64.whl", hash = "sha256:f2073495a7f9b75e57e600747ac09510d67683fd64d3228e009740b7ef88f9fe", size = 1544657, upload-time = "2026-04-07T11:13:54.952Z" }, + { url = "https://files.pythonhosted.org/packages/bc/01/02fa075f9f59ff766d374fecbd042b3ac9782dcd5abc52d909a54f587eeb/rapidfuzz-3.14.5-cp310-cp310-win_arm64.whl", hash = "sha256:8166efddea49fdbc61185559f47593239e4794fd7c9044dd5a789d1a90af852d", size = 816587, upload-time = "2026-04-07T11:13:56.418Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/f9/3c41a7be8855803f4f6c713b472226a98d31d41869d98f64f4ca790510d6/rapidfuzz-3.14.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e251126d48615e1f02b4a178f2cd0cd4f0332b8a019c01a2e10480f7552554b4", size = 1952372, upload-time = "2026-04-07T11:13:58.32Z" }, + { url = "https://files.pythonhosted.org/packages/9e/89/c2557e37531d03465193bff0ab9de70b468420a807d71a26a65100635459/rapidfuzz-3.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ab449c9abd0d4e1f8145dce0798a4c822a1a1933d613c764a641bea88b8bdab", size = 1159782, upload-time = "2026-04-07T11:14:00.127Z" }, + { url = "https://files.pythonhosted.org/packages/1a/b2/ffeeb7eca1a897d51b998f4c0ef0281696c3b06abcca4f88f9def708ffe1/rapidfuzz-3.14.5-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cb2829fedd672dd7107267189dabe2bbe07972801d636014417c6861eb89e358", size = 1383677, upload-time = "2026-04-07T11:14:01.696Z" }, + { url = "https://files.pythonhosted.org/packages/6b/d0/4539e42a2d596e068f7738f279638a4a74edd1fbb6f8594e2458058979c6/rapidfuzz-3.14.5-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3d50e5861872935fece391351cbb5ba21d1bced277cf5e1143d207a0a35f1925", size = 3168906, upload-time = "2026-04-07T11:14:03.29Z" }, + { url = "https://files.pythonhosted.org/packages/5e/1c/3ec897eb9d8b05308aa8ef6ae4ed64b088ad521a3f9d8ff469e7e97bc2b0/rapidfuzz-3.14.5-cp311-cp311-manylinux_2_39_riscv64.whl", hash = "sha256:7092a216728f80c960bd6b3807275d1ee318b168986bd5dc523349581d4890b8", size = 1478176, upload-time = "2026-04-07T11:14:04.94Z" }, + { url = "https://files.pythonhosted.org/packages/ab/ba/970c03a12ce20a5399e22afe9f8932fd4cd1265b8a8461d0e63b00eb4eae/rapidfuzz-3.14.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9669753caef7fdc6529f6adcc5883ed98d65976445d9322e7dbdb6b697feee13", size = 2402441, upload-time = "2026-04-07T11:14:07.228Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/93/61d351cae60c1d0e21ba5ff1a1015ad045539ed215da9d6e302204ed887a/rapidfuzz-3.14.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:823b1b9d9230809d8edcc18872770764bfe8ef4357995e16744047c8ccf0e489", size = 2511628, upload-time = "2026-04-07T11:14:09.234Z" }, + { url = "https://files.pythonhosted.org/packages/87/52/374d2d4f60fd98155142a869323aa221e30868cfa1f15171a0f64070c247/rapidfuzz-3.14.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f0b2af76b7e7060c09e1a0dfa9410eb19369cbe6164509bff2ef94094b54d2b6", size = 4275480, upload-time = "2026-04-07T11:14:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/d8/04/82e7989bc9ec20a15b720a335c5cb6b0724bf6582013898f90a3280cfccd/rapidfuzz-3.14.5-cp311-cp311-win32.whl", hash = "sha256:c5801a89604c65ab4cc9e91b23bc4076d0ca80efd8c976fb63843d7879a85d7f", size = 1725627, upload-time = "2026-04-07T11:14:13.217Z" }, + { url = "https://files.pythonhosted.org/packages/b9/b5/eca8ac5609bc9bcb02bb6ff87fa5983cc92b8772d66a431556ab8a8c178f/rapidfuzz-3.14.5-cp311-cp311-win_amd64.whl", hash = "sha256:d7ca16637c0ede8243f84074044bd0b2335a0341421f8227c85756de2d18c819", size = 1545977, upload-time = "2026-04-07T11:14:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/ca/e1/dbf318de28f65fa2cdd0a9dfbdee380f8199eb83b19259bc4f8592551b4e/rapidfuzz-3.14.5-cp311-cp311-win_arm64.whl", hash = "sha256:8c90cdf8516d9057e502aa6003cea71cf5ec27cc44699ca52412b502a04761bb", size = 816827, upload-time = "2026-04-07T11:14:16.788Z" }, + { url = "https://files.pythonhosted.org/packages/d3/e3/574435c6aafb80254c191ef40d7aca2cb2bb97a095ec9395e9fa59ac307a/rapidfuzz-3.14.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0d3378f471ef440473a396ce2f8e97ee12f89a78b495540e0a5617bbfe895638", size = 1944601, upload-time = "2026-04-07T11:14:18.771Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/1f/fbad3102a255ecc112ce9a7e779bacab7fd14398217be8868dc9082ba363/rapidfuzz-3.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e910eebca9fd0eba245c0555e764597e8a0cccb673a92da2dc2397050725f48", size = 1164293, upload-time = "2026-04-07T11:14:20.534Z" }, + { url = "https://files.pythonhosted.org/packages/88/37/a3eb7ff6121ed3a5f199a8c38cc86c8e481816f879cb0e0b738b078c9a7e/rapidfuzz-3.14.5-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:01550fe5f60fd176aa66b7611289d46dc4aa4b1b904874c7b6d1d54e581c5ec1", size = 1371999, upload-time = "2026-04-07T11:14:22.63Z" }, + { url = "https://files.pythonhosted.org/packages/79/72/97a9728c711c7c1b06e107d3f0623880fb4ef90e147ed13c551a1730e7cc/rapidfuzz-3.14.5-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:48bee0b91bebfaec41e1081e351000659ab7570cc4598d617aa04d5bf827f9e6", size = 3145715, upload-time = "2026-04-07T11:14:24.508Z" }, + { url = "https://files.pythonhosted.org/packages/ed/54/d5caabbea233ac90c286c87c260e49d7641467e87438a18d858e41c82e91/rapidfuzz-3.14.5-cp312-cp312-manylinux_2_39_riscv64.whl", hash = "sha256:7e580cb04ad849ae9b786fa21383c6b994b6e6c1444ad1cb9f22392759d72741", size = 1456304, upload-time = "2026-04-07T11:14:26.515Z" }, + { url = "https://files.pythonhosted.org/packages/fc/a7/2d1a81250ac8c01a0100c026018e76f0e7a097ff63e4c553e02a6938c6fb/rapidfuzz-3.14.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:09d6c9ba091854f07817055d795d604179c12a8f308ba4c7d56f3719dfea1646", size = 2389089, upload-time = "2026-04-07T11:14:28.635Z" }, + { url = "https://files.pythonhosted.org/packages/65/0d/c47c3872203ae88e6506997c0b576ad731f5261daa25d559be09c9756658/rapidfuzz-3.14.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:1e989f86113be66574113b9c7bdf4793f3f863d248e47d911b355e05ca6b6b10", size = 2493404, upload-time = "2026-04-07T11:14:30.577Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/2f/71e0a5a3130792146c8a200a2dd1e52aa16f7c1074012e17f2601eea9a90/rapidfuzz-3.14.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ebd1a18e2e47bc0b292a07e6ed9c3642f8aaa672d12253885f599b50807a4f9", size = 4251709, upload-time = "2026-04-07T11:14:32.451Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/d39874901abacef325adb5b34ae416817c8486dfb4fb87c7a9b74ec5b072/rapidfuzz-3.14.5-cp312-cp312-win32.whl", hash = "sha256:9981d38a703b86f0e315a3cd229fd1906fe1d91c989ed121fb975b3c849f89f5", size = 1710069, upload-time = "2026-04-07T11:14:34.37Z" }, + { url = "https://files.pythonhosted.org/packages/85/0b/f65572c53de8a1c704bda707f63a447b67bdbe95d7cdc70d18885e191df5/rapidfuzz-3.14.5-cp312-cp312-win_amd64.whl", hash = "sha256:d8375e3da319593389727c3187ccaf3e0e84199accc530866b8e0f2b79af05e9", size = 1540630, upload-time = "2026-04-07T11:14:36.287Z" }, + { url = "https://files.pythonhosted.org/packages/5e/c3/143be3a578f989758cae516f3270d5cbb49783a7bfdf57cc27a670e00456/rapidfuzz-3.14.5-cp312-cp312-win_arm64.whl", hash = "sha256:478b59bb018a6780d73f33e38d0b3ec5e968a6c1ed42876b993dd456b7aa20e8", size = 813137, upload-time = "2026-04-07T11:14:38.289Z" }, + { url = "https://files.pythonhosted.org/packages/d9/ee/e71853bf82846c5c2174b924b71d8e8099fb05ff87c958a720380b434ba3/rapidfuzz-3.14.5-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:578e6051f6d5e6200c259b47a103cf06bb875ab5814d17333fc0b5c290b22f4c", size = 1888603, upload-time = "2026-04-07T11:16:18.223Z" }, + { url = "https://files.pythonhosted.org/packages/36/82/40f67b730f32be2ebad9f62add1571c754f52249254b2e88af094b907eee/rapidfuzz-3.14.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fbf1b8bb2695415b347f3727da1addca2acb82c9b97ac86bebf8b1bead1eb12d", size = 1120599, upload-time = "2026-04-07T11:16:20.682Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/9f/a3635cc4ec8fc6e14b46e7db1f7f8763d8c4bef33dcc124eea2e6cb2c8f3/rapidfuzz-3.14.5-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8f4a8f5cc84c7ad6bffa0e9947b33eb343ad66e6b53e94fe54378a5508c5ed53", size = 1348524, upload-time = "2026-04-07T11:16:23.451Z" }, + { url = "https://files.pythonhosted.org/packages/cc/1b/2b229520f0b48464cfcd7aa758f74551d12c9bc4ab544022a60210aab064/rapidfuzz-3.14.5-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:97c6d85283629646fa87acc22c66b30ea9d4de7f6fdf887daa2e30fa041829b5", size = 3099302, upload-time = "2026-04-07T11:16:25.858Z" }, + { url = "https://files.pythonhosted.org/packages/aa/b5/363906b1064fc6fe611783a61764927bbd91919aaaabe8cba82151ca93ef/rapidfuzz-3.14.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:dfef96543ced67d9513a422755db422ae1dc34dade0a1485e0b43e7342ed3ebf", size = 1509889, upload-time = "2026-04-07T11:16:28.487Z" }, ] [[package]] @@ -3667,61 +11643,64 @@ wheels = [ [[package]] name = "regex" -version = "2024.11.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8e/5f/bd69653fbfb76cf8604468d3b4ec4c403197144c7bfe0e6a5fc9e02a07cb/regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519", size = 399494, upload-time = "2024-11-06T20:12:31.635Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/95/3c/4651f6b130c6842a8f3df82461a8950f923925db8b6961063e82744bddcc/regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91", size = 482674, upload-time = "2024-11-06T20:08:57.575Z" }, - { url = "https://files.pythonhosted.org/packages/15/51/9f35d12da8434b489c7b7bffc205c474a0a9432a889457026e9bc06a297a/regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0", size = 287684, upload-time = "2024-11-06T20:08:59.787Z" }, - { url = "https://files.pythonhosted.org/packages/bd/18/b731f5510d1b8fb63c6b6d3484bfa9a59b84cc578ac8b5172970e05ae07c/regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e", size = 284589, upload-time = "2024-11-06T20:09:01.896Z" }, - { url = "https://files.pythonhosted.org/packages/78/a2/6dd36e16341ab95e4c6073426561b9bfdeb1a9c9b63ab1b579c2e96cb105/regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde", size = 782511, upload-time = "2024-11-06T20:09:04.062Z" }, - { url = "https://files.pythonhosted.org/packages/1b/2b/323e72d5d2fd8de0d9baa443e1ed70363ed7e7b2fb526f5950c5cb99c364/regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e", size = 821149, upload-time = "2024-11-06T20:09:06.237Z" }, - { url = "https://files.pythonhosted.org/packages/90/30/63373b9ea468fbef8a907fd273e5c329b8c9535fee36fc8dba5fecac475d/regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2", size = 809707, upload-time = "2024-11-06T20:09:07.715Z" }, - { url = "https://files.pythonhosted.org/packages/f2/98/26d3830875b53071f1f0ae6d547f1d98e964dd29ad35cbf94439120bb67a/regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf", size = 781702, upload-time = "2024-11-06T20:09:10.101Z" }, - { url = 
"https://files.pythonhosted.org/packages/87/55/eb2a068334274db86208ab9d5599ffa63631b9f0f67ed70ea7c82a69bbc8/regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c", size = 771976, upload-time = "2024-11-06T20:09:11.566Z" }, - { url = "https://files.pythonhosted.org/packages/74/c0/be707bcfe98254d8f9d2cff55d216e946f4ea48ad2fd8cf1428f8c5332ba/regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86", size = 697397, upload-time = "2024-11-06T20:09:13.119Z" }, - { url = "https://files.pythonhosted.org/packages/49/dc/bb45572ceb49e0f6509f7596e4ba7031f6819ecb26bc7610979af5a77f45/regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67", size = 768726, upload-time = "2024-11-06T20:09:14.85Z" }, - { url = "https://files.pythonhosted.org/packages/5a/db/f43fd75dc4c0c2d96d0881967897926942e935d700863666f3c844a72ce6/regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d", size = 775098, upload-time = "2024-11-06T20:09:16.504Z" }, - { url = "https://files.pythonhosted.org/packages/99/d7/f94154db29ab5a89d69ff893159b19ada89e76b915c1293e98603d39838c/regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2", size = 839325, upload-time = "2024-11-06T20:09:18.698Z" }, - { url = "https://files.pythonhosted.org/packages/f7/17/3cbfab1f23356fbbf07708220ab438a7efa1e0f34195bf857433f79f1788/regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008", size = 843277, upload-time = "2024-11-06T20:09:21.725Z" }, - { url = 
"https://files.pythonhosted.org/packages/7e/f2/48b393b51900456155de3ad001900f94298965e1cad1c772b87f9cfea011/regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62", size = 773197, upload-time = "2024-11-06T20:09:24.092Z" }, - { url = "https://files.pythonhosted.org/packages/45/3f/ef9589aba93e084cd3f8471fded352826dcae8489b650d0b9b27bc5bba8a/regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e", size = 261714, upload-time = "2024-11-06T20:09:26.36Z" }, - { url = "https://files.pythonhosted.org/packages/42/7e/5f1b92c8468290c465fd50c5318da64319133231415a8aa6ea5ab995a815/regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519", size = 274042, upload-time = "2024-11-06T20:09:28.762Z" }, - { url = "https://files.pythonhosted.org/packages/58/58/7e4d9493a66c88a7da6d205768119f51af0f684fe7be7bac8328e217a52c/regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638", size = 482669, upload-time = "2024-11-06T20:09:31.064Z" }, - { url = "https://files.pythonhosted.org/packages/34/4c/8f8e631fcdc2ff978609eaeef1d6994bf2f028b59d9ac67640ed051f1218/regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7", size = 287684, upload-time = "2024-11-06T20:09:32.915Z" }, - { url = "https://files.pythonhosted.org/packages/c5/1b/f0e4d13e6adf866ce9b069e191f303a30ab1277e037037a365c3aad5cc9c/regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20", size = 284589, upload-time = "2024-11-06T20:09:35.504Z" }, - { url = 
"https://files.pythonhosted.org/packages/25/4d/ab21047f446693887f25510887e6820b93f791992994f6498b0318904d4a/regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114", size = 792121, upload-time = "2024-11-06T20:09:37.701Z" }, - { url = "https://files.pythonhosted.org/packages/45/ee/c867e15cd894985cb32b731d89576c41a4642a57850c162490ea34b78c3b/regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3", size = 831275, upload-time = "2024-11-06T20:09:40.371Z" }, - { url = "https://files.pythonhosted.org/packages/b3/12/b0f480726cf1c60f6536fa5e1c95275a77624f3ac8fdccf79e6727499e28/regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f", size = 818257, upload-time = "2024-11-06T20:09:43.059Z" }, - { url = "https://files.pythonhosted.org/packages/bf/ce/0d0e61429f603bac433910d99ef1a02ce45a8967ffbe3cbee48599e62d88/regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0", size = 792727, upload-time = "2024-11-06T20:09:48.19Z" }, - { url = "https://files.pythonhosted.org/packages/e4/c1/243c83c53d4a419c1556f43777ccb552bccdf79d08fda3980e4e77dd9137/regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55", size = 780667, upload-time = "2024-11-06T20:09:49.828Z" }, - { url = "https://files.pythonhosted.org/packages/c5/f4/75eb0dd4ce4b37f04928987f1d22547ddaf6c4bae697623c1b05da67a8aa/regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89", size = 776963, upload-time = 
"2024-11-06T20:09:51.819Z" }, - { url = "https://files.pythonhosted.org/packages/16/5d/95c568574e630e141a69ff8a254c2f188b4398e813c40d49228c9bbd9875/regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d", size = 784700, upload-time = "2024-11-06T20:09:53.982Z" }, - { url = "https://files.pythonhosted.org/packages/8e/b5/f8495c7917f15cc6fee1e7f395e324ec3e00ab3c665a7dc9d27562fd5290/regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34", size = 848592, upload-time = "2024-11-06T20:09:56.222Z" }, - { url = "https://files.pythonhosted.org/packages/1c/80/6dd7118e8cb212c3c60b191b932dc57db93fb2e36fb9e0e92f72a5909af9/regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d", size = 852929, upload-time = "2024-11-06T20:09:58.642Z" }, - { url = "https://files.pythonhosted.org/packages/11/9b/5a05d2040297d2d254baf95eeeb6df83554e5e1df03bc1a6687fc4ba1f66/regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45", size = 781213, upload-time = "2024-11-06T20:10:00.867Z" }, - { url = "https://files.pythonhosted.org/packages/26/b7/b14e2440156ab39e0177506c08c18accaf2b8932e39fb092074de733d868/regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9", size = 261734, upload-time = "2024-11-06T20:10:03.361Z" }, - { url = "https://files.pythonhosted.org/packages/80/32/763a6cc01d21fb3819227a1cc3f60fd251c13c37c27a73b8ff4315433a8e/regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60", size = 274052, upload-time = "2024-11-06T20:10:05.179Z" }, - { url = 
"https://files.pythonhosted.org/packages/ba/30/9a87ce8336b172cc232a0db89a3af97929d06c11ceaa19d97d84fa90a8f8/regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a", size = 483781, upload-time = "2024-11-06T20:10:07.07Z" }, - { url = "https://files.pythonhosted.org/packages/01/e8/00008ad4ff4be8b1844786ba6636035f7ef926db5686e4c0f98093612add/regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9", size = 288455, upload-time = "2024-11-06T20:10:09.117Z" }, - { url = "https://files.pythonhosted.org/packages/60/85/cebcc0aff603ea0a201667b203f13ba75d9fc8668fab917ac5b2de3967bc/regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2", size = 284759, upload-time = "2024-11-06T20:10:11.155Z" }, - { url = "https://files.pythonhosted.org/packages/94/2b/701a4b0585cb05472a4da28ee28fdfe155f3638f5e1ec92306d924e5faf0/regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4", size = 794976, upload-time = "2024-11-06T20:10:13.24Z" }, - { url = "https://files.pythonhosted.org/packages/4b/bf/fa87e563bf5fee75db8915f7352e1887b1249126a1be4813837f5dbec965/regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577", size = 833077, upload-time = "2024-11-06T20:10:15.37Z" }, - { url = "https://files.pythonhosted.org/packages/a1/56/7295e6bad94b047f4d0834e4779491b81216583c00c288252ef625c01d23/regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3", size = 823160, upload-time = "2024-11-06T20:10:19.027Z" }, - { url = 
"https://files.pythonhosted.org/packages/fb/13/e3b075031a738c9598c51cfbc4c7879e26729c53aa9cca59211c44235314/regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e", size = 796896, upload-time = "2024-11-06T20:10:21.85Z" }, - { url = "https://files.pythonhosted.org/packages/24/56/0b3f1b66d592be6efec23a795b37732682520b47c53da5a32c33ed7d84e3/regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe", size = 783997, upload-time = "2024-11-06T20:10:24.329Z" }, - { url = "https://files.pythonhosted.org/packages/f9/a1/eb378dada8b91c0e4c5f08ffb56f25fcae47bf52ad18f9b2f33b83e6d498/regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e", size = 781725, upload-time = "2024-11-06T20:10:28.067Z" }, - { url = "https://files.pythonhosted.org/packages/83/f2/033e7dec0cfd6dda93390089864732a3409246ffe8b042e9554afa9bff4e/regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29", size = 789481, upload-time = "2024-11-06T20:10:31.612Z" }, - { url = "https://files.pythonhosted.org/packages/83/23/15d4552ea28990a74e7696780c438aadd73a20318c47e527b47a4a5a596d/regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39", size = 852896, upload-time = "2024-11-06T20:10:34.054Z" }, - { url = "https://files.pythonhosted.org/packages/e3/39/ed4416bc90deedbfdada2568b2cb0bc1fdb98efe11f5378d9892b2a88f8f/regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51", size = 860138, upload-time = "2024-11-06T20:10:36.142Z" }, - { url = 
"https://files.pythonhosted.org/packages/93/2d/dd56bb76bd8e95bbce684326302f287455b56242a4f9c61f1bc76e28360e/regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad", size = 787692, upload-time = "2024-11-06T20:10:38.394Z" }, - { url = "https://files.pythonhosted.org/packages/0b/55/31877a249ab7a5156758246b9c59539abbeba22461b7d8adc9e8475ff73e/regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54", size = 262135, upload-time = "2024-11-06T20:10:40.367Z" }, - { url = "https://files.pythonhosted.org/packages/38/ec/ad2d7de49a600cdb8dd78434a1aeffe28b9d6fc42eb36afab4a27ad23384/regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b", size = 273567, upload-time = "2024-11-06T20:10:43.467Z" }, +version = "2026.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/3a246dbf05666918bd3664d9d787f84a9108f6f43cc953a077e4a7dfdb7e/regex-2026.4.4.tar.gz", hash = "sha256:e08270659717f6973523ce3afbafa53515c4dc5dcad637dc215b6fd50f689423", size = 416000, upload-time = "2026-04-03T20:56:28.155Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/59/fd98f8fd54b3feaa76a855324c676c17668c5a1121ec91b7ec96b01bf865/regex-2026.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:74fa82dcc8143386c7c0392e18032009d1db715c25f4ba22d23dc2e04d02a20f", size = 489403, upload-time = "2026-04-03T20:52:39.742Z" }, + { url = "https://files.pythonhosted.org/packages/6c/64/d0f222f68e3579d50babf0e4fcc9c9639ef0587fecc00b15e1e46bfc32fa/regex-2026.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a85b620a388d6c9caa12189233109e236b3da3deffe4ff11b84ae84e218a274f", size = 291208, upload-time = "2026-04-03T20:52:42.943Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/7f/3fab9709b0b0060ba81a04b8a107b34147cd14b9c5551b772154d6505504/regex-2026.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2895506ebe32cc63eeed8f80e6eae453171cfccccab35b70dc3129abec35a5b8", size = 289214, upload-time = "2026-04-03T20:52:44.648Z" }, + { url = "https://files.pythonhosted.org/packages/14/bc/f5dcf04fd462139dcd75495c02eee22032ef741cfa151386a39c3f5fc9b5/regex-2026.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6780f008ee81381c737634e75c24e5a6569cc883c4f8e37a37917ee79efcafd9", size = 785505, upload-time = "2026-04-03T20:52:46.35Z" }, + { url = "https://files.pythonhosted.org/packages/37/36/8a906e216d5b4de7ec3788c1d589b45db40c1c9580cd7b326835cfc976d4/regex-2026.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:88e9b048345c613f253bea4645b2fe7e579782b82cac99b1daad81e29cc2ed8e", size = 852129, upload-time = "2026-04-03T20:52:48.661Z" }, + { url = "https://files.pythonhosted.org/packages/a5/bb/bad2d79be0917a6ef31f5e0f161d9265cb56fd90a3ae1d2e8d991882a48b/regex-2026.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:be061028481186ba62a0f4c5f1cc1e3d5ab8bce70c89236ebe01023883bc903b", size = 899578, upload-time = "2026-04-03T20:52:50.61Z" }, + { url = "https://files.pythonhosted.org/packages/1a/b9/7cd0ceb58cd99c70806241636640ae15b4a3fe62e22e9b99afa67a0d7965/regex-2026.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d2228c02b368d69b724c36e96d3d1da721561fb9cc7faa373d7bf65e07d75cb5", size = 793634, upload-time = "2026-04-03T20:52:53Z" }, + { url = "https://files.pythonhosted.org/packages/2c/fb/c58e3ea40ed183806ccbac05c29a3e8c2f88c1d3a66ed27860d5cad7c62d/regex-2026.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:0540e5b733618a2f84e9cb3e812c8afa82e151ca8e19cf6c4e95c5a65198236f", size = 786210, upload-time = "2026-04-03T20:52:54.713Z" }, + { url = "https://files.pythonhosted.org/packages/54/a9/53790fc7a6c948a7be2bc7214fd9cabdd0d1ba561b0f401c91f4ff0357f0/regex-2026.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cf9b1b2e692d4877880388934ac746c99552ce6bf40792a767fd42c8c99f136d", size = 769930, upload-time = "2026-04-03T20:52:56.825Z" }, + { url = "https://files.pythonhosted.org/packages/e3/3c/29ca44729191c79f5476538cd0fa04fa2553b3c45508519ecea4c7afa8f6/regex-2026.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:011bb48bffc1b46553ac704c975b3348717f4e4aa7a67522b51906f99da1820c", size = 774892, upload-time = "2026-04-03T20:52:58.934Z" }, + { url = "https://files.pythonhosted.org/packages/3e/db/6ae74ef8a4cfead341c367e4eed45f71fb1aaba35827a775eed4f1ba4f74/regex-2026.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8512fcdb43f1bf18582698a478b5ab73f9c1667a5b7548761329ef410cd0a760", size = 848816, upload-time = "2026-04-03T20:53:00.684Z" }, + { url = "https://files.pythonhosted.org/packages/53/9a/f7f2c1c6b610d7c6de1c3dc5951effd92c324b1fde761af2044b4721020f/regex-2026.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:867bddc63109a0276f5a31999e4c8e0eb7bbbad7d6166e28d969a2c1afeb97f9", size = 758363, upload-time = "2026-04-03T20:53:02.155Z" }, + { url = "https://files.pythonhosted.org/packages/dd/55/e5386d393bbf8b43c8b084703a46d635e7b2bdc6e0f5909a2619ea1125f1/regex-2026.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1b9a00b83f3a40e09859c78920571dcb83293c8004079653dd22ec14bbfa98c7", size = 837122, upload-time = "2026-04-03T20:53:03.727Z" }, + { url = "https://files.pythonhosted.org/packages/01/da/cc78710ea2e60b10bacfcc9beb18c67514200ab03597b3b2b319995785c2/regex-2026.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e355be718caf838aa089870259cf1776dc2a4aa980514af9d02c59544d9a8b22", size = 782140, 
upload-time = "2026-04-03T20:53:05.608Z" }, + { url = "https://files.pythonhosted.org/packages/a2/5f/c7bcba41529105d6c2ca7080ecab7184cd00bee2e1ad1fdea80e618704ea/regex-2026.4.4-cp310-cp310-win32.whl", hash = "sha256:33bfda9684646d323414df7abe5692c61d297dbb0530b28ec66442e768813c59", size = 266225, upload-time = "2026-04-03T20:53:07.342Z" }, + { url = "https://files.pythonhosted.org/packages/eb/26/a745729c2c49354ec4f4bce168f29da932ca01b4758227686cc16c7dde1b/regex-2026.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:0709f22a56798457ae317bcce42aacee33c680068a8f14097430d9f9ba364bee", size = 278393, upload-time = "2026-04-03T20:53:08.65Z" }, + { url = "https://files.pythonhosted.org/packages/87/8b/4327eeb9dbb4b098ebecaf02e9f82b79b6077beeb54c43d9a0660cf7c44c/regex-2026.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:ee9627de8587c1a22201cb16d0296ab92b4df5cdcb5349f4e9744d61db7c7c98", size = 270470, upload-time = "2026-04-03T20:53:10.018Z" }, + { url = "https://files.pythonhosted.org/packages/e0/7a/617356cbecdb452812a5d42f720d6d5096b360d4a4c1073af700ea140ad2/regex-2026.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4c36a85b00fadb85db9d9e90144af0a980e1a3d2ef9cd0f8a5bef88054657c6", size = 489415, upload-time = "2026-04-03T20:53:11.645Z" }, + { url = "https://files.pythonhosted.org/packages/20/e6/bf057227144d02e3ba758b66649e87531d744dda5f3254f48660f18ae9d8/regex-2026.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dcb5453ecf9cd58b562967badd1edbf092b0588a3af9e32ee3d05c985077ce87", size = 291205, upload-time = "2026-04-03T20:53:13.289Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3b/637181b787dd1a820ba1c712cee2b4144cd84a32dc776ca067b12b2d70c8/regex-2026.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6aa809ed4dc3706cc38594d67e641601bd2f36d5555b2780ff074edfcb136cf8", size = 289225, upload-time = "2026-04-03T20:53:16.002Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/21/bac05d806ed02cd4b39d9c8e5b5f9a2998c94c3a351b7792e80671fa5315/regex-2026.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:33424f5188a7db12958246a54f59a435b6cb62c5cf9c8d71f7cc49475a5fdada", size = 792434, upload-time = "2026-04-03T20:53:17.414Z" }, + { url = "https://files.pythonhosted.org/packages/d9/17/c65d1d8ae90b772d5758eb4014e1e011bb2db353fc4455432e6cc9100df7/regex-2026.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7d346fccdde28abba117cc9edc696b9518c3307fbfcb689e549d9b5979018c6d", size = 861730, upload-time = "2026-04-03T20:53:18.903Z" }, + { url = "https://files.pythonhosted.org/packages/ad/64/933321aa082a2c6ee2785f22776143ba89840189c20d3b6b1d12b6aae16b/regex-2026.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:415a994b536440f5011aa77e50a4274d15da3245e876e5c7f19da349caaedd87", size = 906495, upload-time = "2026-04-03T20:53:20.561Z" }, + { url = "https://files.pythonhosted.org/packages/01/ea/4c8d306e9c36ac22417336b1e02e7b358152c34dc379673f2d331143725f/regex-2026.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21e5eb86179b4c67b5759d452ea7c48eb135cd93308e7a260aa489ed2eb423a4", size = 799810, upload-time = "2026-04-03T20:53:22.961Z" }, + { url = "https://files.pythonhosted.org/packages/29/ce/7605048f00e1379eba89d610c7d644d8f695dc9b26d3b6ecfa3132b872ff/regex-2026.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:312ec9dd1ae7d96abd8c5a36a552b2139931914407d26fba723f9e53c8186f86", size = 774242, upload-time = "2026-04-03T20:53:25.015Z" }, + { url = "https://files.pythonhosted.org/packages/e9/77/283e0d5023fde22cd9e86190d6d9beb21590a452b195ffe00274de470691/regex-2026.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:a0d2b28aa1354c7cd7f71b7658c4326f7facac106edd7f40eda984424229fd59", size = 781257, upload-time = "2026-04-03T20:53:26.918Z" }, + { url = "https://files.pythonhosted.org/packages/8b/fb/7f3b772be101373c8626ed34c5d727dcbb8abd42a7b1219bc25fd9a3cc04/regex-2026.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:349d7310eddff40429a099c08d995c6d4a4bfaf3ff40bd3b5e5cb5a5a3c7d453", size = 854490, upload-time = "2026-04-03T20:53:29.065Z" }, + { url = "https://files.pythonhosted.org/packages/85/30/56547b80f34f4dd2986e1cdd63b1712932f63b6c4ce2f79c50a6cd79d1c2/regex-2026.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:e7ab63e9fe45a9ec3417509e18116b367e89c9ceb6219222a3396fa30b147f80", size = 763544, upload-time = "2026-04-03T20:53:30.917Z" }, + { url = "https://files.pythonhosted.org/packages/ac/2f/ce060fdfea8eff34a8997603532e44cdb7d1f35e3bc253612a8707a90538/regex-2026.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fe896e07a5a2462308297e515c0054e9ec2dd18dfdc9427b19900b37dfe6f40b", size = 844442, upload-time = "2026-04-03T20:53:32.463Z" }, + { url = "https://files.pythonhosted.org/packages/e5/44/810cb113096a1dacbe82789fbfab2823f79d19b7f1271acecb7009ba9b88/regex-2026.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eb59c65069498dbae3c0ef07bbe224e1eaa079825a437fb47a479f0af11f774f", size = 789162, upload-time = "2026-04-03T20:53:34.039Z" }, + { url = "https://files.pythonhosted.org/packages/20/96/9647dd7f2ecf6d9ce1fb04dfdb66910d094e10d8fe53e9c15096d8aa0bd2/regex-2026.4.4-cp311-cp311-win32.whl", hash = "sha256:2a5d273181b560ef8397c8825f2b9d57013de744da9e8257b8467e5da8599351", size = 266227, upload-time = "2026-04-03T20:53:35.601Z" }, + { url = "https://files.pythonhosted.org/packages/33/80/74e13262460530c3097ff343a17de9a34d040a5dc4de9cf3a8241faab51c/regex-2026.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:9542ccc1e689e752594309444081582f7be2fdb2df75acafea8a075108566735", size = 278399, upload-time = "2026-04-03T20:53:37.021Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/3c/39f19f47f19dcefa3403f09d13562ca1c0fd07ab54db2bc03148f3f6b46a/regex-2026.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:b5f9fb784824a042be3455b53d0b112655686fdb7a91f88f095f3fee1e2a2a54", size = 270473, upload-time = "2026-04-03T20:53:38.633Z" }, + { url = "https://files.pythonhosted.org/packages/e5/28/b972a4d3df61e1d7bcf1b59fdb3cddef22f88b6be43f161bb41ebc0e4081/regex-2026.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c07ab8794fa929e58d97a0e1796b8b76f70943fa39df225ac9964615cf1f9d52", size = 490434, upload-time = "2026-04-03T20:53:40.219Z" }, + { url = "https://files.pythonhosted.org/packages/84/20/30041446cf6dc3e0eab344fc62770e84c23b6b68a3b657821f9f80cb69b4/regex-2026.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2c785939dc023a1ce4ec09599c032cc9933d258a998d16ca6f2b596c010940eb", size = 292061, upload-time = "2026-04-03T20:53:41.862Z" }, + { url = "https://files.pythonhosted.org/packages/62/c8/3baa06d75c98c46d4cc4262b71fd2edb9062b5665e868bca57859dadf93a/regex-2026.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b1ce5c81c9114f1ce2f9288a51a8fd3aeea33a0cc440c415bf02da323aa0a76", size = 289628, upload-time = "2026-04-03T20:53:43.701Z" }, + { url = "https://files.pythonhosted.org/packages/31/87/3accf55634caad8c0acab23f5135ef7d4a21c39f28c55c816ae012931408/regex-2026.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:760ef21c17d8e6a4fe8cf406a97cf2806a4df93416ccc82fc98d25b1c20425be", size = 796651, upload-time = "2026-04-03T20:53:45.379Z" }, + { url = "https://files.pythonhosted.org/packages/f6/0c/aaa2c83f34efedbf06f61cb1942c25f6cf1ee3b200f832c4d05f28306c2e/regex-2026.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7088fcdcb604a4417c208e2169715800d28838fefd7455fbe40416231d1d47c1", size = 865916, upload-time = "2026-04-03T20:53:47.064Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/f6/8c6924c865124643e8f37823eca845dc27ac509b2ee58123685e71cd0279/regex-2026.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:07edca1ba687998968f7db5bc355288d0c6505caa7374f013d27356d93976d13", size = 912287, upload-time = "2026-04-03T20:53:49.422Z" }, + { url = "https://files.pythonhosted.org/packages/11/0e/a9f6f81013e0deaf559b25711623864970fe6a098314e374ccb1540a4152/regex-2026.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:993f657a7c1c6ec51b5e0ba97c9817d06b84ea5fa8d82e43b9405de0defdc2b9", size = 801126, upload-time = "2026-04-03T20:53:51.096Z" }, + { url = "https://files.pythonhosted.org/packages/71/61/3a0cc8af2dc0c8deb48e644dd2521f173f7e6513c6e195aad9aa8dd77ac5/regex-2026.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:2b69102a743e7569ebee67e634a69c4cb7e59d6fa2e1aa7d3bdbf3f61435f62d", size = 776788, upload-time = "2026-04-03T20:53:52.889Z" }, + { url = "https://files.pythonhosted.org/packages/64/0b/8bb9cbf21ef7dee58e49b0fdb066a7aded146c823202e16494a36777594f/regex-2026.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dac006c8b6dda72d86ea3d1333d45147de79a3a3f26f10c1cf9287ca4ca0ac3", size = 785184, upload-time = "2026-04-03T20:53:55.627Z" }, + { url = "https://files.pythonhosted.org/packages/99/c2/d3e80e8137b25ee06c92627de4e4d98b94830e02b3e6f81f3d2e3f504cf5/regex-2026.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:50a766ee2010d504554bfb5f578ed2e066898aa26411d57e6296230627cdefa0", size = 859913, upload-time = "2026-04-03T20:53:57.249Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/9d5d876157d969c804622456ef250017ac7a8f83e0e14f903b9e6df5ce95/regex-2026.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9e2f5217648f68e3028c823df58663587c1507a5ba8419f4fdfc8a461be76043", size = 765732, upload-time = "2026-04-03T20:53:59.428Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/80/b568935b4421388561c8ed42aff77247285d3ae3bb2a6ca22af63bae805e/regex-2026.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:39d8de85a08e32632974151ba59c6e9140646dcc36c80423962b1c5c0a92e244", size = 852152, upload-time = "2026-04-03T20:54:01.505Z" }, + { url = "https://files.pythonhosted.org/packages/39/29/f0f81217e21cd998245da047405366385d5c6072048038a3d33b37a79dc0/regex-2026.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:55d9304e0e7178dfb1e106c33edf834097ddf4a890e2f676f6c5118f84390f73", size = 789076, upload-time = "2026-04-03T20:54:03.323Z" }, + { url = "https://files.pythonhosted.org/packages/49/1d/1d957a61976ab9d4e767dd4f9d04b66cc0c41c5e36cf40e2d43688b5ae6f/regex-2026.4.4-cp312-cp312-win32.whl", hash = "sha256:04bb679bc0bde8a7bfb71e991493d47314e7b98380b083df2447cda4b6edb60f", size = 266700, upload-time = "2026-04-03T20:54:05.639Z" }, + { url = "https://files.pythonhosted.org/packages/c5/5c/bf575d396aeb58ea13b06ef2adf624f65b70fafef6950a80fc3da9cae3bc/regex-2026.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:db0ac18435a40a2543dbb3d21e161a6c78e33e8159bd2e009343d224bb03bb1b", size = 277768, upload-time = "2026-04-03T20:54:07.312Z" }, + { url = "https://files.pythonhosted.org/packages/c9/27/049df16ec6a6828ccd72add3c7f54b4df029669bea8e9817df6fff58be90/regex-2026.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:4ce255cc05c1947a12989c6db801c96461947adb7a59990f1360b5983fab4983", size = 270568, upload-time = "2026-04-03T20:54:09.484Z" }, ] [[package]] name = "requests" -version = "2.32.4" +version = "2.33.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -3729,9 +11708,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 
135258, upload-time = "2025-06-09T16:43:07.34Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5f/a4/98b9c7c6428a668bf7e42ebb7c79d576a1c3c1e3ae2d47e674b468388871/requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517", size = 134120, upload-time = "2026-03-30T16:09:15.531Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" }, + { url = "https://files.pythonhosted.org/packages/d7/8e/7540e8a2036f79a125c1d2ebadf69ed7901608859186c856fa0388ef4197/requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a", size = 64947, upload-time = "2026-03-30T16:09:13.83Z" }, ] [[package]] @@ -3766,21 +11745,20 @@ dependencies = [ { name = "ftfy" }, { name = "packaging" }, { name = "regex" }, - { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra 
== 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' 
and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or 
(extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.21.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 
'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or 
(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = 
"torchvision", version = "0.22.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" 
}, - { name = "torchvision", version = "0.23.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.23.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') 
or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or 
(extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = 
"https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra 
== 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or 
(extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.21.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.22.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "tqdm" }, ] wheels = [ @@ -3793,26 +11771,28 @@ version = "0.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "addict" }, - { name = "numpy" }, - { name = "opencv-python" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 
'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and 
extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "opencv-python", version = "4.11.0.86", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "opencv-python", version = "4.13.0.92", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-falcon-perception' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "pycocotools" }, { name = "supervision" }, { name = "timm" }, - { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cpu", source = 
{ registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or 
(extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.21.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 
'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.23.0", source = { registry = 
"https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.23.0", source = { registry = 
"https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 
'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' 
and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra 
== 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.21.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra 
== 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.22.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or 
(extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "transformers" }, { name = "yapf" }, ] @@ -3828,10 +11808,14 @@ dependencies = [ { name = "absl-py" }, { name = "attrs" }, { name = "flatbuffers" }, - { name = "jax" }, - { name = "jaxlib" }, + { name = "jax", version = "0.6.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "jax", version = "0.7.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra 
== 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "jaxlib", version = "0.6.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra 
== 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "jaxlib", version = "0.7.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "matplotlib" }, - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' 
and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' 
and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra 
== 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "opencv-contrib-python" }, { name = "protobuf" }, { name = "sounddevice" }, @@ -3861,23 +11845,24 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "hydra-core" }, { name = "iopath" }, - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { 
registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra 
== 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') 
or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "pillow" }, - { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = 
"https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = 
"https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.21.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 
'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' 
and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and 
extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') 
or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = 
"0.22.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.23.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.23.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
== 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.21.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.22.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "tqdm" }, ] sdist = { url = "https://files.pythonhosted.org/packages/28/d0/c2bf6d697b7974f85ea01d8dd1bb6e7f071ebe44dfd4e84e0b49b7a3ff87/rf_sam_2-1.0.3.tar.gz", hash = "sha256:9e7f44b5b67e094f58b1a689aa63605f597e50ec7113d73b33c578e2762ea856", size = 126639, upload-time = "2026-02-02T17:58:26.028Z" } @@ -3896,15 +11881,15 @@ wheels = [ [[package]] name = "rich" -version = "14.1.0" +version = "14.3.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/c6/f3b320c27991c46f43ee9d856302c70dc2d0fb2dba4842ff739d5f46b393/rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b", size = 230582, upload-time = "2026-02-19T17:23:12.474Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" }, ] [[package]] @@ -3973,24 +11958,28 @@ wheels = [ [[package]] name = "safetensors" -version = "0.5.3" +version = "0.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/71/7e/2d5d6ee7b40c0682315367ec7475693d110f512922d582fef1bd4a63adc3/safetensors-0.5.3.tar.gz", hash = "sha256:b6b0d6ecacec39a4fdd99cc19f4576f5219ce858e6fd8dbe7609df0b8dc56965", size = 67210, upload-time = "2025-02-26T09:15:13.155Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/18/ae/88f6c49dbd0cc4da0e08610019a3c78a7d390879a919411a410a1876d03a/safetensors-0.5.3-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd20eb133db8ed15b40110b7c00c6df51655a2998132193de2f75f72d99c7073", size = 436917, upload-time = "2025-02-26T09:15:03.702Z" }, - { url = "https://files.pythonhosted.org/packages/b8/3b/11f1b4a2f5d2ab7da34ecc062b0bc301f2be024d110a6466726bec8c055c/safetensors-0.5.3-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:21d01c14ff6c415c485616b8b0bf961c46b3b343ca59110d38d744e577f9cce7", size = 418419, upload-time = "2025-02-26T09:15:01.765Z" }, - { url = "https://files.pythonhosted.org/packages/5d/9a/add3e6fef267658075c5a41573c26d42d80c935cdc992384dfae435feaef/safetensors-0.5.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11bce6164887cd491ca75c2326a113ba934be596e22b28b1742ce27b1d076467", size = 459493, upload-time = "2025-02-26T09:14:51.812Z" }, - { url = "https://files.pythonhosted.org/packages/df/5c/bf2cae92222513cc23b3ff85c4a1bb2811a2c3583ac0f8e8d502751de934/safetensors-0.5.3-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4a243be3590bc3301c821da7a18d87224ef35cbd3e5f5727e4e0728b8172411e", size = 
472400, upload-time = "2025-02-26T09:14:53.549Z" }, - { url = "https://files.pythonhosted.org/packages/58/11/7456afb740bd45782d0f4c8e8e1bb9e572f1bf82899fb6ace58af47b4282/safetensors-0.5.3-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8bd84b12b1670a6f8e50f01e28156422a2bc07fb16fc4e98bded13039d688a0d", size = 522891, upload-time = "2025-02-26T09:14:55.717Z" }, - { url = "https://files.pythonhosted.org/packages/57/3d/fe73a9d2ace487e7285f6e157afee2383bd1ddb911b7cb44a55cf812eae3/safetensors-0.5.3-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:391ac8cab7c829452175f871fcaf414aa1e292b5448bd02620f675a7f3e7abb9", size = 537694, upload-time = "2025-02-26T09:14:57.036Z" }, - { url = "https://files.pythonhosted.org/packages/a6/f8/dae3421624fcc87a89d42e1898a798bc7ff72c61f38973a65d60df8f124c/safetensors-0.5.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cead1fa41fc54b1e61089fa57452e8834f798cb1dc7a09ba3524f1eb08e0317a", size = 471642, upload-time = "2025-02-26T09:15:00.544Z" }, - { url = "https://files.pythonhosted.org/packages/ce/20/1fbe16f9b815f6c5a672f5b760951e20e17e43f67f231428f871909a37f6/safetensors-0.5.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1077f3e94182d72618357b04b5ced540ceb71c8a813d3319f1aba448e68a770d", size = 502241, upload-time = "2025-02-26T09:14:58.303Z" }, - { url = "https://files.pythonhosted.org/packages/5f/18/8e108846b506487aa4629fe4116b27db65c3dde922de2c8e0cc1133f3f29/safetensors-0.5.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:799021e78287bac619c7b3f3606730a22da4cda27759ddf55d37c8db7511c74b", size = 638001, upload-time = "2025-02-26T09:15:05.79Z" }, - { url = "https://files.pythonhosted.org/packages/82/5a/c116111d8291af6c8c8a8b40628fe833b9db97d8141c2a82359d14d9e078/safetensors-0.5.3-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:df26da01aaac504334644e1b7642fa000bfec820e7cef83aeac4e355e03195ff", size = 734013, upload-time = 
"2025-02-26T09:15:07.892Z" }, - { url = "https://files.pythonhosted.org/packages/7d/ff/41fcc4d3b7de837963622e8610d998710705bbde9a8a17221d85e5d0baad/safetensors-0.5.3-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:32c3ef2d7af8b9f52ff685ed0bc43913cdcde135089ae322ee576de93eae5135", size = 670687, upload-time = "2025-02-26T09:15:09.979Z" }, - { url = "https://files.pythonhosted.org/packages/40/ad/2b113098e69c985a3d8fbda4b902778eae4a35b7d5188859b4a63d30c161/safetensors-0.5.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:37f1521be045e56fc2b54c606d4455573e717b2d887c579ee1dbba5f868ece04", size = 643147, upload-time = "2025-02-26T09:15:11.185Z" }, - { url = "https://files.pythonhosted.org/packages/0a/0c/95aeb51d4246bd9a3242d3d8349c1112b4ee7611a4b40f0c5c93b05f001d/safetensors-0.5.3-cp38-abi3-win32.whl", hash = "sha256:cfc0ec0846dcf6763b0ed3d1846ff36008c6e7290683b61616c4b040f6a54ace", size = 296677, upload-time = "2025-02-26T09:15:16.554Z" }, - { url = "https://files.pythonhosted.org/packages/69/e2/b011c38e5394c4c18fb5500778a55ec43ad6106126e74723ffaee246f56e/safetensors-0.5.3-cp38-abi3-win_amd64.whl", hash = "sha256:836cbbc320b47e80acd40e44c8682db0e8ad7123209f69b093def21ec7cafd11", size = 308878, upload-time = "2025-02-26T09:15:14.99Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/29/9c/6e74567782559a63bd040a236edca26fd71bc7ba88de2ef35d75df3bca5e/safetensors-0.7.0.tar.gz", hash = "sha256:07663963b67e8bd9f0b8ad15bb9163606cd27cc5a1b96235a50d8369803b96b0", size = 200878, upload-time = "2025-11-19T15:18:43.199Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/47/aef6c06649039accf914afef490268e1067ed82be62bcfa5b7e886ad15e8/safetensors-0.7.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c82f4d474cf725255d9e6acf17252991c3c8aac038d6ef363a4bf8be2f6db517", size = 467781, upload-time = "2025-11-19T15:18:35.84Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/00/374c0c068e30cd31f1e1b46b4b5738168ec79e7689ca82ee93ddfea05109/safetensors-0.7.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:94fd4858284736bb67a897a41608b5b0c2496c9bdb3bf2af1fa3409127f20d57", size = 447058, upload-time = "2025-11-19T15:18:34.416Z" }, + { url = "https://files.pythonhosted.org/packages/f1/06/578ffed52c2296f93d7fd2d844cabfa92be51a587c38c8afbb8ae449ca89/safetensors-0.7.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e07d91d0c92a31200f25351f4acb2bc6aff7f48094e13ebb1d0fb995b54b6542", size = 491748, upload-time = "2025-11-19T15:18:09.79Z" }, + { url = "https://files.pythonhosted.org/packages/ae/33/1debbbb70e4791dde185edb9413d1fe01619255abb64b300157d7f15dddd/safetensors-0.7.0-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8469155f4cb518bafb4acf4865e8bb9d6804110d2d9bdcaa78564b9fd841e104", size = 503881, upload-time = "2025-11-19T15:18:16.145Z" }, + { url = "https://files.pythonhosted.org/packages/8e/1c/40c2ca924d60792c3be509833df711b553c60effbd91da6f5284a83f7122/safetensors-0.7.0-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54bef08bf00a2bff599982f6b08e8770e09cc012d7bba00783fc7ea38f1fb37d", size = 623463, upload-time = "2025-11-19T15:18:21.11Z" }, + { url = "https://files.pythonhosted.org/packages/9b/3a/13784a9364bd43b0d61eef4bea2845039bc2030458b16594a1bd787ae26e/safetensors-0.7.0-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:42cb091236206bb2016d245c377ed383aa7f78691748f3bb6ee1bfa51ae2ce6a", size = 532855, upload-time = "2025-11-19T15:18:25.719Z" }, + { url = "https://files.pythonhosted.org/packages/a0/60/429e9b1cb3fc651937727befe258ea24122d9663e4d5709a48c9cbfceecb/safetensors-0.7.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac7252938f0696ddea46f5e855dd3138444e82236e3be475f54929f0c510d48", size = 507152, upload-time = "2025-11-19T15:18:33.023Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/a8/4b45e4e059270d17af60359713ffd83f97900d45a6afa73aaa0d737d48b6/safetensors-0.7.0-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1d060c70284127fa805085d8f10fbd0962792aed71879d00864acda69dbab981", size = 541856, upload-time = "2025-11-19T15:18:31.075Z" }, + { url = "https://files.pythonhosted.org/packages/06/87/d26d8407c44175d8ae164a95b5a62707fcc445f3c0c56108e37d98070a3d/safetensors-0.7.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cdab83a366799fa730f90a4ebb563e494f28e9e92c4819e556152ad55e43591b", size = 674060, upload-time = "2025-11-19T15:18:37.211Z" }, + { url = "https://files.pythonhosted.org/packages/11/f5/57644a2ff08dc6325816ba7217e5095f17269dada2554b658442c66aed51/safetensors-0.7.0-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:672132907fcad9f2aedcb705b2d7b3b93354a2aec1b2f706c4db852abe338f85", size = 771715, upload-time = "2025-11-19T15:18:38.689Z" }, + { url = "https://files.pythonhosted.org/packages/86/31/17883e13a814bd278ae6e266b13282a01049b0c81341da7fd0e3e71a80a3/safetensors-0.7.0-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:5d72abdb8a4d56d4020713724ba81dac065fedb7f3667151c4a637f1d3fb26c0", size = 714377, upload-time = "2025-11-19T15:18:40.162Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d8/0c8a7dc9b41dcac53c4cbf9df2b9c83e0e0097203de8b37a712b345c0be5/safetensors-0.7.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b0f6d66c1c538d5a94a73aa9ddca8ccc4227e6c9ff555322ea40bdd142391dd4", size = 677368, upload-time = "2025-11-19T15:18:41.627Z" }, + { url = "https://files.pythonhosted.org/packages/05/e5/cb4b713c8a93469e3c5be7c3f8d77d307e65fe89673e731f5c2bfd0a9237/safetensors-0.7.0-cp38-abi3-win32.whl", hash = "sha256:c74af94bf3ac15ac4d0f2a7c7b4663a15f8c2ab15ed0fc7531ca61d0835eccba", size = 326423, upload-time = "2025-11-19T15:18:45.74Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/e6/ec8471c8072382cb91233ba7267fd931219753bb43814cbc71757bfd4dab/safetensors-0.7.0-cp38-abi3-win_amd64.whl", hash = "sha256:d1239932053f56f3456f32eb9625590cc7582e905021f94636202a864d470755", size = 341380, upload-time = "2025-11-19T15:18:44.427Z" }, + { url = "https://files.pythonhosted.org/packages/a7/6a/4d08d89a6fcbe905c5ae68b8b34f0791850882fc19782d0d02c65abbdf3b/safetensors-0.7.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4729811a6640d019a4b7ba8638ee2fd21fa5ca8c7e7bdf0fed62068fcaac737", size = 492430, upload-time = "2025-11-19T15:18:11.884Z" }, + { url = "https://files.pythonhosted.org/packages/dd/29/59ed8152b30f72c42d00d241e58eaca558ae9dbfa5695206e2e0f54c7063/safetensors-0.7.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:12f49080303fa6bb424b362149a12949dfbbf1e06811a88f2307276b0c131afd", size = 503977, upload-time = "2025-11-19T15:18:17.523Z" }, + { url = "https://files.pythonhosted.org/packages/d3/0b/4811bfec67fa260e791369b16dab105e4bae82686120554cc484064e22b4/safetensors-0.7.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0071bffba4150c2f46cae1432d31995d77acfd9f8db598b5d1a2ce67e8440ad2", size = 623890, upload-time = "2025-11-19T15:18:22.666Z" }, + { url = "https://files.pythonhosted.org/packages/58/5b/632a58724221ef03d78ab65062e82a1010e1bef8e8e0b9d7c6d7b8044841/safetensors-0.7.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:473b32699f4200e69801bf5abf93f1a4ecd432a70984df164fc22ccf39c4a6f3", size = 531885, upload-time = "2025-11-19T15:18:27.146Z" }, ] [[package]] @@ -4000,14 +11989,17 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "imageio" }, { name = "lazy-loader" }, - { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "networkx", version = "3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "numpy" }, + { name = "networkx", version = 
"3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' 
and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or 
(python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or 
(python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' 
and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "packaging" }, { name = "pillow" }, - { name = "scipy" }, - { name = "tifffile", version = "2025.5.10", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "tifffile", version = "2025.9.9", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' 
and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra 
== 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "tifffile", version = "2025.5.10", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra 
== 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "tifffile", version = "2026.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra 
== 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/c7/a8/3c0f256012b93dd2cb6fda9245e9f4bff7dc0486880b248005f15ea2255e/scikit_image-0.25.2.tar.gz", hash = "sha256:e5a37e6cd4d0c018a7a55b9d601357e3382826d3888c10d0213fc63bff977dde", size = 22693594, upload-time = "2025-02-18T18:05:24.538Z" } wheels = [ @@ -4032,8 +12024,16 @@ wheels = [ name = "scipy" version = "1.15.3" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", +] dependencies = [ - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or 
(python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0f/37/6964b830433e654ec7485e45a00fc9a27cf868d622838f6b6d9c5ec0d532/scipy-1.15.3.tar.gz", hash = "sha256:eae3cf522bc7df64b42cad3925c876e1b0b6c35c1337c93e12c0f366f55b0eaf", size = 59419214, upload-time = "2025-05-08T16:13:05.955Z" } wheels = [ @@ -4066,31 +12066,682 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e6/eb/3bf6ea8ab7f1503dca3a10df2e4b9c3f6b3316df07f6c0ded94b281c7101/scipy-1.15.3-cp312-cp312-win_amd64.whl", hash = "sha256:52092bc0472cfd17df49ff17e70624345efece4e1a12b23783a1ac59a1b728ed", size = 40966184, upload-time = "2025-05-08T16:06:52.623Z" }, ] +[[package]] +name = "scipy" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' 
and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and 
sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' 
and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == 
'3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra 
!= 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and 
sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", 
+ "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == 
'3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' 
and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' 
and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and 
sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' 
and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and 
extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == 
'3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra 
!= 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' 
and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 
'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra 
!= 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and 
sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", +] +dependencies = [ + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra 
== 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or 
(python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' 
and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra 
== 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7a/97/5a3609c4f8d58b039179648e62dd220f89864f56f7357f5d4f45c29eb2cc/scipy-1.17.1.tar.gz", hash = "sha256:95d8e012d8cb8816c226aef832200b1d45109ed4464303e997c5b13122b297c0", size = 30573822, upload-time = "2026-02-23T00:26:24.851Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/75/b4ce781849931fef6fd529afa6b63711d5a733065722d0c3e2724af9e40a/scipy-1.17.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:1f95b894f13729334fb990162e911c9e5dc1ab390c58aa6cbecb389c5b5e28ec", size = 31613675, upload-time = "2026-02-23T00:16:00.13Z" }, + { url = "https://files.pythonhosted.org/packages/f7/58/bccc2861b305abdd1b8663d6130c0b3d7cc22e8d86663edbc8401bfd40d4/scipy-1.17.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:e18f12c6b0bc5a592ed23d3f7b891f68fd7f8241d69b7883769eb5d5dfb52696", size = 28162057, upload-time = "2026-02-23T00:16:09.456Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/ee/18146b7757ed4976276b9c9819108adbc73c5aad636e5353e20746b73069/scipy-1.17.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a3472cfbca0a54177d0faa68f697d8ba4c80bbdc19908c3465556d9f7efce9ee", size = 20334032, upload-time = "2026-02-23T00:16:17.358Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e6/cef1cf3557f0c54954198554a10016b6a03b2ec9e22a4e1df734936bd99c/scipy-1.17.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:766e0dc5a616d026a3a1cffa379af959671729083882f50307e18175797b3dfd", size = 22709533, upload-time = "2026-02-23T00:16:25.791Z" }, + { url = "https://files.pythonhosted.org/packages/4d/60/8804678875fc59362b0fb759ab3ecce1f09c10a735680318ac30da8cd76b/scipy-1.17.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:744b2bf3640d907b79f3fd7874efe432d1cf171ee721243e350f55234b4cec4c", size = 33062057, upload-time = "2026-02-23T00:16:36.931Z" }, + { url = "https://files.pythonhosted.org/packages/09/7d/af933f0f6e0767995b4e2d705a0665e454d1c19402aa7e895de3951ebb04/scipy-1.17.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43af8d1f3bea642559019edfe64e9b11192a8978efbd1539d7bc2aaa23d92de4", size = 35349300, upload-time = "2026-02-23T00:16:49.108Z" }, + { url = "https://files.pythonhosted.org/packages/b4/3d/7ccbbdcbb54c8fdc20d3b6930137c782a163fa626f0aef920349873421ba/scipy-1.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd96a1898c0a47be4520327e01f874acfd61fb48a9420f8aa9f6483412ffa444", size = 35127333, upload-time = "2026-02-23T00:17:01.293Z" }, + { url = "https://files.pythonhosted.org/packages/e8/19/f926cb11c42b15ba08e3a71e376d816ac08614f769b4f47e06c3580c836a/scipy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4eb6c25dd62ee8d5edf68a8e1c171dd71c292fdae95d8aeb3dd7d7de4c364082", size = 37741314, upload-time = "2026-02-23T00:17:12.576Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/da/0d1df507cf574b3f224ccc3d45244c9a1d732c81dcb26b1e8a766ae271a8/scipy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:d30e57c72013c2a4fe441c2fcb8e77b14e152ad48b5464858e07e2ad9fbfceff", size = 36607512, upload-time = "2026-02-23T00:17:23.424Z" }, + { url = "https://files.pythonhosted.org/packages/68/7f/bdd79ceaad24b671543ffe0ef61ed8e659440eb683b66f033454dcee90eb/scipy-1.17.1-cp311-cp311-win_arm64.whl", hash = "sha256:9ecb4efb1cd6e8c4afea0daa91a87fbddbce1b99d2895d151596716c0b2e859d", size = 24599248, upload-time = "2026-02-23T00:17:34.561Z" }, + { url = "https://files.pythonhosted.org/packages/35/48/b992b488d6f299dbe3f11a20b24d3dda3d46f1a635ede1c46b5b17a7b163/scipy-1.17.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:35c3a56d2ef83efc372eaec584314bd0ef2e2f0d2adb21c55e6ad5b344c0dcb8", size = 31610954, upload-time = "2026-02-23T00:17:49.855Z" }, + { url = "https://files.pythonhosted.org/packages/b2/02/cf107b01494c19dc100f1d0b7ac3cc08666e96ba2d64db7626066cee895e/scipy-1.17.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:fcb310ddb270a06114bb64bbe53c94926b943f5b7f0842194d585c65eb4edd76", size = 28172662, upload-time = "2026-02-23T00:18:01.64Z" }, + { url = "https://files.pythonhosted.org/packages/cf/a9/599c28631bad314d219cf9ffd40e985b24d603fc8a2f4ccc5ae8419a535b/scipy-1.17.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:cc90d2e9c7e5c7f1a482c9875007c095c3194b1cfedca3c2f3291cdc2bc7c086", size = 20344366, upload-time = "2026-02-23T00:18:12.015Z" }, + { url = "https://files.pythonhosted.org/packages/35/f5/906eda513271c8deb5af284e5ef0206d17a96239af79f9fa0aebfe0e36b4/scipy-1.17.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:c80be5ede8f3f8eded4eff73cc99a25c388ce98e555b17d31da05287015ffa5b", size = 22704017, upload-time = "2026-02-23T00:18:21.502Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/34/16f10e3042d2f1d6b66e0428308ab52224b6a23049cb2f5c1756f713815f/scipy-1.17.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e19ebea31758fac5893a2ac360fedd00116cbb7628e650842a6691ba7ca28a21", size = 32927842, upload-time = "2026-02-23T00:18:35.367Z" }, + { url = "https://files.pythonhosted.org/packages/01/8e/1e35281b8ab6d5d72ebe9911edcdffa3f36b04ed9d51dec6dd140396e220/scipy-1.17.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02ae3b274fde71c5e92ac4d54bc06c42d80e399fec704383dcd99b301df37458", size = 35235890, upload-time = "2026-02-23T00:18:49.188Z" }, + { url = "https://files.pythonhosted.org/packages/c5/5c/9d7f4c88bea6e0d5a4f1bc0506a53a00e9fcb198de372bfe4d3652cef482/scipy-1.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8a604bae87c6195d8b1045eddece0514d041604b14f2727bbc2b3020172045eb", size = 35003557, upload-time = "2026-02-23T00:18:54.74Z" }, + { url = "https://files.pythonhosted.org/packages/65/94/7698add8f276dbab7a9de9fb6b0e02fc13ee61d51c7c3f85ac28b65e1239/scipy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f590cd684941912d10becc07325a3eeb77886fe981415660d9265c4c418d0bea", size = 37625856, upload-time = "2026-02-23T00:19:00.307Z" }, + { url = "https://files.pythonhosted.org/packages/a2/84/dc08d77fbf3d87d3ee27f6a0c6dcce1de5829a64f2eae85a0ecc1f0daa73/scipy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:41b71f4a3a4cab9d366cd9065b288efc4d4f3c0b37a91a8e0947fb5bd7f31d87", size = 36549682, upload-time = "2026-02-23T00:19:07.67Z" }, + { url = "https://files.pythonhosted.org/packages/bc/98/fe9ae9ffb3b54b62559f52dedaebe204b408db8109a8c66fdd04869e6424/scipy-1.17.1-cp312-cp312-win_arm64.whl", hash = "sha256:f4115102802df98b2b0db3cce5cb9b92572633a1197c77b7553e5203f284a5b3", size = 24547340, upload-time = "2026-02-23T00:19:12.024Z" }, +] + [[package]] name = "segmentation-models-pytorch" version = "0.5.0" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub" }, - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and 
extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < 
'3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra 
== 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') 
or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and 
extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') 
or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "pillow" }, { name = "safetensors" }, { name = "timm" }, - { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, 
marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
== 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker 
= "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') 
or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') 
or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.21.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
== 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 
'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or 
(sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' 
and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra 
== 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 
'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') 
or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.23.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra 
== 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.23.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra 
== 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or 
(extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.21.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 
'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.22.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or 
(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "tqdm" }, ] sdist = { url = "https://files.pythonhosted.org/packages/15/fa/d5a29d49240fb10bdead608b4d0c6805684a8f63b1f65863502be65b1ca4/segmentation_models_pytorch-0.5.0.tar.gz", hash = "sha256:cabba8aced6ef7bdcd6288dd9e1dc2840848aa819d539c455bd07aeceb2fdf96", size = 105150, upload-time = "2025-04-17T10:43:45.755Z" } @@ -4100,78 +12751,80 @@ wheels = [ [[package]] name = "sentencepiece" -version = "0.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c9/d2/b9c7ca067c26d8ff085d252c89b5f69609ca93fb85a00ede95f4857865d4/sentencepiece-0.2.0.tar.gz", hash = "sha256:a52c19171daaf2e697dc6cbe67684e0fa341b1248966f6aebb541de654d15843", size = 2632106, upload-time = "2024-02-19T17:06:47.428Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/71/98648c3b64b23edb5403f74bcc906ad21766872a6e1ada26ea3f1eb941ab/sentencepiece-0.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:188779e1298a1c8b8253c7d3ad729cb0a9891e5cef5e5d07ce4592c54869e227", size = 2408979, upload-time = "2024-02-19T17:05:34.651Z" }, - { url = "https://files.pythonhosted.org/packages/77/9f/7efbaa6d4c0c718a9affbecc536b03ca62f99f421bdffb531c16030e2d2b/sentencepiece-0.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bed9cf85b296fa2b76fc2547b9cbb691a523864cebaee86304c43a7b4cb1b452", size = 1238845, upload-time = "2024-02-19T17:05:37.371Z" }, - { url = "https://files.pythonhosted.org/packages/1c/e4/c2541027a43ec6962ba9b601805d17ba3f86b38bdeae0e8ac65a2981e248/sentencepiece-0.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d7b67e724bead13f18db6e1d10b6bbdc454af574d70efbb36f27d90387be1ca3", size = 1181472, upload-time = "2024-02-19T17:05:39.775Z" }, - { url = 
"https://files.pythonhosted.org/packages/fd/46/316c1ba6c52b97de76aff7b9da678f7afbb52136afb2987c474d95630e65/sentencepiece-0.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fde4b08cfe237be4484c6c7c2e2c75fb862cfeab6bd5449ce4caeafd97b767a", size = 1259151, upload-time = "2024-02-19T17:05:42.594Z" }, - { url = "https://files.pythonhosted.org/packages/aa/5a/3c48738a0835d76dd06c62b6ac48d39c923cde78dd0f587353bdcbb99851/sentencepiece-0.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c378492056202d1c48a4979650981635fd97875a00eabb1f00c6a236b013b5e", size = 1355931, upload-time = "2024-02-19T17:05:44.695Z" }, - { url = "https://files.pythonhosted.org/packages/a6/27/33019685023221ca8ed98e8ceb7ae5e166032686fa3662c68f1f1edf334e/sentencepiece-0.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1380ce6540a368de2ef6d7e6ba14ba8f3258df650d39ba7d833b79ee68a52040", size = 1301537, upload-time = "2024-02-19T17:05:46.713Z" }, - { url = "https://files.pythonhosted.org/packages/ca/e4/55f97cef14293171fef5f96e96999919ab5b4d1ce95b53547ad653d7e3bf/sentencepiece-0.2.0-cp310-cp310-win32.whl", hash = "sha256:a1151d6a6dd4b43e552394aed0edfe9292820272f0194bd56c7c1660a0c06c3d", size = 936747, upload-time = "2024-02-19T17:05:48.705Z" }, - { url = "https://files.pythonhosted.org/packages/85/f4/4ef1a6e0e9dbd8a60780a91df8b7452ada14cfaa0e17b3b8dfa42cecae18/sentencepiece-0.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:d490142b0521ef22bc1085f061d922a2a6666175bb6b42e588ff95c0db6819b2", size = 991525, upload-time = "2024-02-19T17:05:55.145Z" }, - { url = "https://files.pythonhosted.org/packages/32/43/8f8885168a47a02eba1455bd3f4f169f50ad5b8cebd2402d0f5e20854d04/sentencepiece-0.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17982700c4f6dbb55fa3594f3d7e5dd1c8659a274af3738e33c987d2a27c9d5c", size = 2409036, upload-time = "2024-02-19T17:05:58.021Z" }, - { url = 
"https://files.pythonhosted.org/packages/0f/35/e63ba28062af0a3d688a9f128e407a1a2608544b2f480cb49bf7f4b1cbb9/sentencepiece-0.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7c867012c0e8bcd5bdad0f791609101cb5c66acb303ab3270218d6debc68a65e", size = 1238921, upload-time = "2024-02-19T17:06:06.434Z" }, - { url = "https://files.pythonhosted.org/packages/de/42/ae30952c4a0bd773e90c9bf2579f5533037c886dfc8ec68133d5694f4dd2/sentencepiece-0.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7fd6071249c74f779c5b27183295b9202f8dedb68034e716784364443879eaa6", size = 1181477, upload-time = "2024-02-19T17:06:09.292Z" }, - { url = "https://files.pythonhosted.org/packages/e3/ac/2f2ab1d60bb2d795d054eebe5e3f24b164bc21b5a9b75fba7968b3b91b5a/sentencepiece-0.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f90c55a65013cbb8f4d7aab0599bf925cde4adc67ae43a0d323677b5a1c6cb", size = 1259182, upload-time = "2024-02-19T17:06:16.459Z" }, - { url = "https://files.pythonhosted.org/packages/45/fb/14633c6ecf262c468759ffcdb55c3a7ee38fe4eda6a70d75ee7c7d63c58b/sentencepiece-0.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b293734059ef656dcd65be62ff771507bea8fed0a711b6733976e1ed3add4553", size = 1355537, upload-time = "2024-02-19T17:06:19.274Z" }, - { url = "https://files.pythonhosted.org/packages/fb/12/2f5c8d4764b00033cf1c935b702d3bb878d10be9f0b87f0253495832d85f/sentencepiece-0.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e58b47f933aca74c6a60a79dcb21d5b9e47416256c795c2d58d55cec27f9551d", size = 1301464, upload-time = "2024-02-19T17:06:21.796Z" }, - { url = "https://files.pythonhosted.org/packages/4e/b1/67afc0bde24f6dcb3acdea0dd8dcdf4b8b0db240f6bacd39378bd32d09f8/sentencepiece-0.2.0-cp311-cp311-win32.whl", hash = "sha256:c581258cf346b327c62c4f1cebd32691826306f6a41d8c4bec43b010dee08e75", size = 936749, upload-time = "2024-02-19T17:06:24.167Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/f6/587c62fd21fc988555b85351f50bbde43a51524caafd63bc69240ded14fd/sentencepiece-0.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:0993dbc665f4113017892f1b87c3904a44d0640eda510abcacdfb07f74286d36", size = 991520, upload-time = "2024-02-19T17:06:26.936Z" }, - { url = "https://files.pythonhosted.org/packages/27/5a/141b227ed54293360a9ffbb7bf8252b4e5efc0400cdeac5809340e5d2b21/sentencepiece-0.2.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ea5f536e32ea8ec96086ee00d7a4a131ce583a1b18d130711707c10e69601cb2", size = 2409370, upload-time = "2024-02-19T17:06:29.315Z" }, - { url = "https://files.pythonhosted.org/packages/2e/08/a4c135ad6fc2ce26798d14ab72790d66e813efc9589fd30a5316a88ca8d5/sentencepiece-0.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0cb51f53b6aae3c36bafe41e86167c71af8370a039f542c43b0cce5ef24a68c", size = 1239288, upload-time = "2024-02-19T17:06:31.674Z" }, - { url = "https://files.pythonhosted.org/packages/49/0a/2fe387f825ac5aad5a0bfe221904882106cac58e1b693ba7818785a882b6/sentencepiece-0.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3212121805afc58d8b00ab4e7dd1f8f76c203ddb9dc94aa4079618a31cf5da0f", size = 1181597, upload-time = "2024-02-19T17:06:33.763Z" }, - { url = "https://files.pythonhosted.org/packages/cc/38/e4698ee2293fe4835dc033c49796a39b3eebd8752098f6bd0aa53a14af1f/sentencepiece-0.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a3149e3066c2a75e0d68a43eb632d7ae728c7925b517f4c05c40f6f7280ce08", size = 1259220, upload-time = "2024-02-19T17:06:35.85Z" }, - { url = "https://files.pythonhosted.org/packages/12/24/fd7ef967c9dad2f6e6e5386d0cadaf65cda8b7be6e3861a9ab3121035139/sentencepiece-0.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:632f3594d3e7ac8b367bca204cb3fd05a01d5b21455acd097ea4c0e30e2f63d7", size = 1355962, upload-time = "2024-02-19T17:06:38.616Z" }, - { url = 
"https://files.pythonhosted.org/packages/4f/d2/18246f43ca730bb81918f87b7e886531eda32d835811ad9f4657c54eee35/sentencepiece-0.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f295105c6bdbb05bd5e1b0cafbd78ff95036f5d3641e7949455a3f4e5e7c3109", size = 1301706, upload-time = "2024-02-19T17:06:40.712Z" }, - { url = "https://files.pythonhosted.org/packages/8a/47/ca237b562f420044ab56ddb4c278672f7e8c866e183730a20e413b38a989/sentencepiece-0.2.0-cp312-cp312-win32.whl", hash = "sha256:fb89f811e5efd18bab141afc3fea3de141c3f69f3fe9e898f710ae7fe3aab251", size = 936941, upload-time = "2024-02-19T17:06:42.802Z" }, - { url = "https://files.pythonhosted.org/packages/c6/97/d159c32642306ee2b70732077632895438867b3b6df282354bd550cf2a67/sentencepiece-0.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:7a673a72aab81fef5ebe755c6e0cc60087d1f3a4700835d40537183c1703a45f", size = 991994, upload-time = "2024-02-19T17:06:45.01Z" }, +version = "0.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/15/2e7a025fc62d764b151ae6d0f2a92f8081755ebe8d4a64099accc6f77ba6/sentencepiece-0.2.1.tar.gz", hash = "sha256:8138cec27c2f2282f4a34d9a016e3374cd40e5c6e9cb335063db66a0a3b71fad", size = 3228515, upload-time = "2025-08-12T07:00:51.718Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/31/5b7cccb307b485db1a2372d6d2980b0a65d067f8be5ca943a103b4acd5b3/sentencepiece-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e10fa50bdbaa5e2445dbd387979980d391760faf0ec99a09bd7780ff37eaec44", size = 1942557, upload-time = "2025-08-12T06:59:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/1f/41/0ac923a8e685ad290c5afc8ae55c5844977b8d75076fcc04302b9a324274/sentencepiece-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f27ae6deea72efdb6f361750c92f6c21fd0ad087445082770cc34015213c526", size = 1325384, upload-time = "2025-08-12T06:59:14.334Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/ef/3751555d67daf9003384978f169d31c775cb5c7baf28633caaf1eb2b2b4d/sentencepiece-0.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60937c959e6f44159fdd9f56fbdd302501f96114a5ba436829496d5f32d8de3f", size = 1253317, upload-time = "2025-08-12T06:59:16.247Z" }, + { url = "https://files.pythonhosted.org/packages/46/a5/742c69b7bd144eb32b6e5fd50dbd8abbbc7a95fce2fe16e50156fa400e3b/sentencepiece-0.2.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8b1d91545578852f128650b8cce4ec20f93d39b378ff554ebe66290f2dabb92", size = 1316379, upload-time = "2025-08-12T06:59:17.825Z" }, + { url = "https://files.pythonhosted.org/packages/c8/89/8deeafbba2871e8fa10f20f17447786f4ac38085925335728d360eaf4cae/sentencepiece-0.2.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:27e38eee653abc3d387862e67bc5c8b6f428cd604e688b85d29170b7e725c26c", size = 1387926, upload-time = "2025-08-12T06:59:19.395Z" }, + { url = "https://files.pythonhosted.org/packages/c3/ca/67fe73005f0ab617c6a970b199754e28e524b6873aa7025224fad3cda252/sentencepiece-0.2.1-cp310-cp310-win32.whl", hash = "sha256:251874d720ac7f28024a168501f3c7bb15d1802245f6e66de565f18bbb9b5eaa", size = 999550, upload-time = "2025-08-12T06:59:20.844Z" }, + { url = "https://files.pythonhosted.org/packages/6d/33/dc5b54042050d2dda4229c3ce1f862541c99966390b6aa20f54d520d2dc2/sentencepiece-0.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:e52144670738b4b477fade6c2a9b6af71a8d0094514c9853ac9f6fc1fcfabae7", size = 1054613, upload-time = "2025-08-12T06:59:22.255Z" }, + { url = "https://files.pythonhosted.org/packages/fa/19/1ea47f46ff97fe04422b78997da1a37cd632f414aae042d27a9009c5b733/sentencepiece-0.2.1-cp310-cp310-win_arm64.whl", hash = "sha256:9076430ac25dfa7147d9d05751dbc66a04bc1aaac371c07f84952979ea59f0d0", size = 1033884, upload-time = "2025-08-12T06:59:24.194Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/15/46afbab00733d81788b64be430ca1b93011bb9388527958e26cc31832de5/sentencepiece-0.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6356d0986b8b8dc351b943150fcd81a1c6e6e4d439772e8584c64230e58ca987", size = 1942560, upload-time = "2025-08-12T06:59:25.82Z" }, + { url = "https://files.pythonhosted.org/packages/fa/79/7c01b8ef98a0567e9d84a4e7a910f8e7074fcbf398a5cd76f93f4b9316f9/sentencepiece-0.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8f8ba89a3acb3dc1ae90f65ec1894b0b9596fdb98ab003ff38e058f898b39bc7", size = 1325385, upload-time = "2025-08-12T06:59:27.722Z" }, + { url = "https://files.pythonhosted.org/packages/bb/88/2b41e07bd24f33dcf2f18ec3b74247aa4af3526bad8907b8727ea3caba03/sentencepiece-0.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:02593eca45440ef39247cee8c47322a34bdcc1d8ae83ad28ba5a899a2cf8d79a", size = 1253319, upload-time = "2025-08-12T06:59:29.306Z" }, + { url = "https://files.pythonhosted.org/packages/a0/54/38a1af0c6210a3c6f95aa46d23d6640636d020fba7135cd0d9a84ada05a7/sentencepiece-0.2.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a0d15781a171d188b661ae4bde1d998c303f6bd8621498c50c671bd45a4798e", size = 1316162, upload-time = "2025-08-12T06:59:30.914Z" }, + { url = "https://files.pythonhosted.org/packages/ef/66/fb191403ade791ad2c3c1e72fe8413e63781b08cfa3aa4c9dfc536d6e795/sentencepiece-0.2.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f5a3e0d9f445ed9d66c0fec47d4b23d12cfc858b407a03c194c1b26c2ac2a63", size = 1387785, upload-time = "2025-08-12T06:59:32.491Z" }, + { url = "https://files.pythonhosted.org/packages/a9/2d/3bd9b08e70067b2124518b308db6a84a4f8901cc8a4317e2e4288cdd9b4d/sentencepiece-0.2.1-cp311-cp311-win32.whl", hash = "sha256:6d297a1748d429ba8534eebe5535448d78b8acc32d00a29b49acf28102eeb094", size = 999555, upload-time = "2025-08-12T06:59:34.475Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/b8/f709977f5fda195ae1ea24f24e7c581163b6f142b1005bc3d0bbfe4d7082/sentencepiece-0.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:82d9ead6591015f009cb1be1cb1c015d5e6f04046dbb8c9588b931e869a29728", size = 1054617, upload-time = "2025-08-12T06:59:36.461Z" }, + { url = "https://files.pythonhosted.org/packages/7a/40/a1fc23be23067da0f703709797b464e8a30a1c78cc8a687120cd58d4d509/sentencepiece-0.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:39f8651bd10974eafb9834ce30d9bcf5b73e1fc798a7f7d2528f9820ca86e119", size = 1033877, upload-time = "2025-08-12T06:59:38.391Z" }, + { url = "https://files.pythonhosted.org/packages/4a/be/32ce495aa1d0e0c323dcb1ba87096037358edee539cac5baf8755a6bd396/sentencepiece-0.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:57cae326c8727de58c85977b175af132a7138d84c764635d7e71bbee7e774133", size = 1943152, upload-time = "2025-08-12T06:59:40.048Z" }, + { url = "https://files.pythonhosted.org/packages/88/7e/ff23008899a58678e98c6ff592bf4d368eee5a71af96d0df6b38a039dd4f/sentencepiece-0.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:56dd39a3c4d6493db3cdca7e8cc68c6b633f0d4195495cbadfcf5af8a22d05a6", size = 1325651, upload-time = "2025-08-12T06:59:41.536Z" }, + { url = "https://files.pythonhosted.org/packages/19/84/42eb3ce4796777a1b5d3699dfd4dca85113e68b637f194a6c8d786f16a04/sentencepiece-0.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d9381351182ff9888cc80e41c632e7e274b106f450de33d67a9e8f6043da6f76", size = 1253645, upload-time = "2025-08-12T06:59:42.903Z" }, + { url = "https://files.pythonhosted.org/packages/89/fa/d3d5ebcba3cb9e6d3775a096251860c41a6bc53a1b9461151df83fe93255/sentencepiece-0.2.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99f955df238021bf11f0fc37cdb54fd5e5b5f7fd30ecc3d93fb48b6815437167", size = 1316273, upload-time = "2025-08-12T06:59:44.476Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/88/14f2f4a2b922d8b39be45bf63d79e6cd3a9b2f248b2fcb98a69b12af12f5/sentencepiece-0.2.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0cdfecef430d985f1c2bcbfff3defd1d95dae876fbd0173376012d2d7d24044b", size = 1387881, upload-time = "2025-08-12T06:59:46.09Z" }, + { url = "https://files.pythonhosted.org/packages/fd/b8/903e5ccb77b4ef140605d5d71b4f9e0ad95d456d6184688073ed11712809/sentencepiece-0.2.1-cp312-cp312-win32.whl", hash = "sha256:a483fd29a34c3e34c39ac5556b0a90942bec253d260235729e50976f5dba1068", size = 999540, upload-time = "2025-08-12T06:59:48.023Z" }, + { url = "https://files.pythonhosted.org/packages/2d/81/92df5673c067148c2545b1bfe49adfd775bcc3a169a047f5a0e6575ddaca/sentencepiece-0.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:4cdc7c36234fda305e85c32949c5211faaf8dd886096c7cea289ddc12a2d02de", size = 1054671, upload-time = "2025-08-12T06:59:49.895Z" }, + { url = "https://files.pythonhosted.org/packages/fe/02/c5e3bc518655d714622bec87d83db9cdba1cd0619a4a04e2109751c4f47f/sentencepiece-0.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:daeb5e9e9fcad012324807856113708614d534f596d5008638eb9b40112cd9e4", size = 1033923, upload-time = "2025-08-12T06:59:51.952Z" }, ] [[package]] name = "setuptools" -version = "80.9.0" +version = "81.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/1c/73e719955c59b8e424d015ab450f51c0af856ae46ea2da83eba51cc88de1/setuptools-81.0.0.tar.gz", hash = "sha256:487b53915f52501f0a79ccfd0c02c165ffe06631443a886740b91af4b7a5845a", size = 1198299, upload-time = "2026-02-06T21:10:39.601Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, + { url = "https://files.pythonhosted.org/packages/e1/e3/c164c88b2e5ce7b24d667b9bd83589cf4f3520d97cad01534cd3c4f55fdb/setuptools-81.0.0-py3-none-any.whl", hash = "sha256:fdd925d5c5d9f62e4b74b30d6dd7828ce236fd6ed998a08d81de62ce5a6310d6", size = 1062021, upload-time = "2026-02-06T21:10:37.175Z" }, ] [[package]] name = "shapely" -version = "2.1.1" +version = "2.1.2" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ca/3c/2da625233f4e605155926566c0e7ea8dda361877f48e8b1655e53456f252/shapely-2.1.1.tar.gz", hash = "sha256:500621967f2ffe9642454808009044c21e5b35db89ce69f8a2042c2ffd0e2772", size = 315422, upload-time = "2025-05-19T11:04:41.265Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/82/fa/f18025c95b86116dd8f1ec58cab078bd59ab51456b448136ca27463be533/shapely-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d8ccc872a632acb7bdcb69e5e78df27213f7efd195882668ffba5405497337c6", size = 1825117, upload-time = "2025-05-19T11:03:43.547Z" }, - { url = "https://files.pythonhosted.org/packages/c7/65/46b519555ee9fb851234288be7c78be11e6260995281071d13abf2c313d0/shapely-2.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f24f2ecda1e6c091da64bcbef8dd121380948074875bd1b247b3d17e99407099", size = 1628541, upload-time = "2025-05-19T11:03:45.162Z" }, - { url = "https://files.pythonhosted.org/packages/29/51/0b158a261df94e33505eadfe737db9531f346dfa60850945ad25fd4162f1/shapely-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45112a5be0b745b49e50f8829ce490eb67fefb0cea8d4f8ac5764bfedaa83d2d", size = 2948453, upload-time = "2025-05-19T11:03:46.681Z" 
}, - { url = "https://files.pythonhosted.org/packages/a9/4f/6c9bb4bd7b1a14d7051641b9b479ad2a643d5cbc382bcf5bd52fd0896974/shapely-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c10ce6f11904d65e9bbb3e41e774903c944e20b3f0b282559885302f52f224a", size = 3057029, upload-time = "2025-05-19T11:03:48.346Z" }, - { url = "https://files.pythonhosted.org/packages/89/0b/ad1b0af491d753a83ea93138eee12a4597f763ae12727968d05934fe7c78/shapely-2.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:61168010dfe4e45f956ffbbaf080c88afce199ea81eb1f0ac43230065df320bd", size = 3894342, upload-time = "2025-05-19T11:03:49.602Z" }, - { url = "https://files.pythonhosted.org/packages/7d/96/73232c5de0b9fdf0ec7ddfc95c43aaf928740e87d9f168bff0e928d78c6d/shapely-2.1.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cacf067cdff741cd5c56a21c52f54ece4e4dad9d311130493a791997da4a886b", size = 4056766, upload-time = "2025-05-19T11:03:51.252Z" }, - { url = "https://files.pythonhosted.org/packages/43/cc/eec3c01f754f5b3e0c47574b198f9deb70465579ad0dad0e1cef2ce9e103/shapely-2.1.1-cp310-cp310-win32.whl", hash = "sha256:23b8772c3b815e7790fb2eab75a0b3951f435bc0fce7bb146cb064f17d35ab4f", size = 1523744, upload-time = "2025-05-19T11:03:52.624Z" }, - { url = "https://files.pythonhosted.org/packages/50/fc/a7187e6dadb10b91e66a9e715d28105cde6489e1017cce476876185a43da/shapely-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:2c7b2b6143abf4fa77851cef8ef690e03feade9a0d48acd6dc41d9e0e78d7ca6", size = 1703061, upload-time = "2025-05-19T11:03:54.695Z" }, - { url = "https://files.pythonhosted.org/packages/19/97/2df985b1e03f90c503796ad5ecd3d9ed305123b64d4ccb54616b30295b29/shapely-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:587a1aa72bc858fab9b8c20427b5f6027b7cbc92743b8e2c73b9de55aa71c7a7", size = 1819368, upload-time = "2025-05-19T11:03:55.937Z" }, - { url = 
"https://files.pythonhosted.org/packages/56/17/504518860370f0a28908b18864f43d72f03581e2b6680540ca668f07aa42/shapely-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9fa5c53b0791a4b998f9ad84aad456c988600757a96b0a05e14bba10cebaaaea", size = 1625362, upload-time = "2025-05-19T11:03:57.06Z" }, - { url = "https://files.pythonhosted.org/packages/36/a1/9677337d729b79fce1ef3296aac6b8ef4743419086f669e8a8070eff8f40/shapely-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aabecd038841ab5310d23495253f01c2a82a3aedae5ab9ca489be214aa458aa7", size = 2999005, upload-time = "2025-05-19T11:03:58.692Z" }, - { url = "https://files.pythonhosted.org/packages/a2/17/e09357274699c6e012bbb5a8ea14765a4d5860bb658df1931c9f90d53bd3/shapely-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:586f6aee1edec04e16227517a866df3e9a2e43c1f635efc32978bb3dc9c63753", size = 3108489, upload-time = "2025-05-19T11:04:00.059Z" }, - { url = "https://files.pythonhosted.org/packages/17/5d/93a6c37c4b4e9955ad40834f42b17260ca74ecf36df2e81bb14d12221b90/shapely-2.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b9878b9e37ad26c72aada8de0c9cfe418d9e2ff36992a1693b7f65a075b28647", size = 3945727, upload-time = "2025-05-19T11:04:01.786Z" }, - { url = "https://files.pythonhosted.org/packages/a3/1a/ad696648f16fd82dd6bfcca0b3b8fbafa7aacc13431c7fc4c9b49e481681/shapely-2.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9a531c48f289ba355e37b134e98e28c557ff13965d4653a5228d0f42a09aed0", size = 4109311, upload-time = "2025-05-19T11:04:03.134Z" }, - { url = "https://files.pythonhosted.org/packages/d4/38/150dd245beab179ec0d4472bf6799bf18f21b1efbef59ac87de3377dbf1c/shapely-2.1.1-cp311-cp311-win32.whl", hash = "sha256:4866de2673a971820c75c0167b1f1cd8fb76f2d641101c23d3ca021ad0449bab", size = 1522982, upload-time = "2025-05-19T11:04:05.217Z" }, - { url = 
"https://files.pythonhosted.org/packages/93/5b/842022c00fbb051083c1c85430f3bb55565b7fd2d775f4f398c0ba8052ce/shapely-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:20a9d79958b3d6c70d8a886b250047ea32ff40489d7abb47d01498c704557a93", size = 1703872, upload-time = "2025-05-19T11:04:06.791Z" }, - { url = "https://files.pythonhosted.org/packages/fb/64/9544dc07dfe80a2d489060791300827c941c451e2910f7364b19607ea352/shapely-2.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2827365b58bf98efb60affc94a8e01c56dd1995a80aabe4b701465d86dcbba43", size = 1833021, upload-time = "2025-05-19T11:04:08.022Z" }, - { url = "https://files.pythonhosted.org/packages/07/aa/fb5f545e72e89b6a0f04a0effda144f5be956c9c312c7d4e00dfddbddbcf/shapely-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9c551f7fa7f1e917af2347fe983f21f212863f1d04f08eece01e9c275903fad", size = 1643018, upload-time = "2025-05-19T11:04:09.343Z" }, - { url = "https://files.pythonhosted.org/packages/03/46/61e03edba81de729f09d880ce7ae5c1af873a0814206bbfb4402ab5c3388/shapely-2.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78dec4d4fbe7b1db8dc36de3031767e7ece5911fb7782bc9e95c5cdec58fb1e9", size = 2986417, upload-time = "2025-05-19T11:04:10.56Z" }, - { url = "https://files.pythonhosted.org/packages/1f/1e/83ec268ab8254a446b4178b45616ab5822d7b9d2b7eb6e27cf0b82f45601/shapely-2.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:872d3c0a7b8b37da0e23d80496ec5973c4692920b90de9f502b5beb994bbaaef", size = 3098224, upload-time = "2025-05-19T11:04:11.903Z" }, - { url = "https://files.pythonhosted.org/packages/f1/44/0c21e7717c243e067c9ef8fa9126de24239f8345a5bba9280f7bb9935959/shapely-2.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2e2b9125ebfbc28ecf5353511de62f75a8515ae9470521c9a693e4bb9fbe0cf1", size = 3925982, upload-time = "2025-05-19T11:04:13.224Z" }, - { url = 
"https://files.pythonhosted.org/packages/15/50/d3b4e15fefc103a0eb13d83bad5f65cd6e07a5d8b2ae920e767932a247d1/shapely-2.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4b96cea171b3d7f6786976a0520f178c42792897653ecca0c5422fb1e6946e6d", size = 4089122, upload-time = "2025-05-19T11:04:14.477Z" }, - { url = "https://files.pythonhosted.org/packages/bd/05/9a68f27fc6110baeedeeebc14fd86e73fa38738c5b741302408fb6355577/shapely-2.1.1-cp312-cp312-win32.whl", hash = "sha256:39dca52201e02996df02e447f729da97cfb6ff41a03cb50f5547f19d02905af8", size = 1522437, upload-time = "2025-05-19T11:04:16.203Z" }, - { url = "https://files.pythonhosted.org/packages/bc/e9/a4560e12b9338842a1f82c9016d2543eaa084fce30a1ca11991143086b57/shapely-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:13d643256f81d55a50013eff6321142781cf777eb6a9e207c2c9e6315ba6044a", size = 1703479, upload-time = "2025-05-19T11:04:18.497Z" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or 
(extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or 
(extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or 
(extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or 
(python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4d/bc/0989043118a27cccb4e906a46b7565ce36ca7b57f5a18b78f4f1b0f72d9d/shapely-2.1.2.tar.gz", hash = "sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9", size = 315489, upload-time = "2025-09-24T13:51:41.432Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/89/c3548aa9b9812a5d143986764dededfa48d817714e947398bdda87c77a72/shapely-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7ae48c236c0324b4e139bea88a306a04ca630f49be66741b340729d380d8f52f", size = 1825959, upload-time = "2025-09-24T13:50:00.682Z" }, + { url = "https://files.pythonhosted.org/packages/ce/8a/7ebc947080442edd614ceebe0ce2cdbd00c25e832c240e1d1de61d0e6b38/shapely-2.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eba6710407f1daa8e7602c347dfc94adc02205ec27ed956346190d66579eb9ea", size = 1629196, upload-time = "2025-09-24T13:50:03.447Z" }, + { url = "https://files.pythonhosted.org/packages/c8/86/c9c27881c20d00fc409e7e059de569d5ed0abfcec9c49548b124ebddea51/shapely-2.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ef4a456cc8b7b3d50ccec29642aa4aeda959e9da2fe9540a92754770d5f0cf1f", size = 2951065, upload-time = "2025-09-24T13:50:05.266Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/8a/0ab1f7433a2a85d9e9aea5b1fbb333f3b09b309e7817309250b4b7b2cc7a/shapely-2.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e38a190442aacc67ff9f75ce60aec04893041f16f97d242209106d502486a142", size = 3058666, upload-time = "2025-09-24T13:50:06.872Z" }, + { url = "https://files.pythonhosted.org/packages/bb/c6/5a30ffac9c4f3ffd5b7113a7f5299ccec4713acd5ee44039778a7698224e/shapely-2.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:40d784101f5d06a1fd30b55fc11ea58a61be23f930d934d86f19a180909908a4", size = 3966905, upload-time = "2025-09-24T13:50:09.417Z" }, + { url = "https://files.pythonhosted.org/packages/9c/72/e92f3035ba43e53959007f928315a68fbcf2eeb4e5ededb6f0dc7ff1ecc3/shapely-2.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f6f6cd5819c50d9bcf921882784586aab34a4bd53e7553e175dece6db513a6f0", size = 4129260, upload-time = "2025-09-24T13:50:11.183Z" }, + { url = "https://files.pythonhosted.org/packages/42/24/605901b73a3d9f65fa958e63c9211f4be23d584da8a1a7487382fac7fdc5/shapely-2.1.2-cp310-cp310-win32.whl", hash = "sha256:fe9627c39c59e553c90f5bc3128252cb85dc3b3be8189710666d2f8bc3a5503e", size = 1544301, upload-time = "2025-09-24T13:50:12.521Z" }, + { url = "https://files.pythonhosted.org/packages/e1/89/6db795b8dd3919851856bd2ddd13ce434a748072f6fdee42ff30cbd3afa3/shapely-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:1d0bfb4b8f661b3b4ec3565fa36c340bfb1cda82087199711f86a88647d26b2f", size = 1722074, upload-time = "2025-09-24T13:50:13.909Z" }, + { url = "https://files.pythonhosted.org/packages/8f/8d/1ff672dea9ec6a7b5d422eb6d095ed886e2e523733329f75fdcb14ee1149/shapely-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91121757b0a36c9aac3427a651a7e6567110a4a67c97edf04f8d55d4765f6618", size = 1820038, upload-time = "2025-09-24T13:50:15.628Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/ce/28fab8c772ce5db23a0d86bf0adaee0c4c79d5ad1db766055fa3dab442e2/shapely-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:16a9c722ba774cf50b5d4541242b4cce05aafd44a015290c82ba8a16931ff63d", size = 1626039, upload-time = "2025-09-24T13:50:16.881Z" }, + { url = "https://files.pythonhosted.org/packages/70/8b/868b7e3f4982f5006e9395c1e12343c66a8155c0374fdc07c0e6a1ab547d/shapely-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cc4f7397459b12c0b196c9efe1f9d7e92463cbba142632b4cc6d8bbbbd3e2b09", size = 3001519, upload-time = "2025-09-24T13:50:18.606Z" }, + { url = "https://files.pythonhosted.org/packages/13/02/58b0b8d9c17c93ab6340edd8b7308c0c5a5b81f94ce65705819b7416dba5/shapely-2.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:136ab87b17e733e22f0961504d05e77e7be8c9b5a8184f685b4a91a84efe3c26", size = 3110842, upload-time = "2025-09-24T13:50:21.77Z" }, + { url = "https://files.pythonhosted.org/packages/af/61/8e389c97994d5f331dcffb25e2fa761aeedfb52b3ad9bcdd7b8671f4810a/shapely-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:16c5d0fc45d3aa0a69074979f4f1928ca2734fb2e0dde8af9611e134e46774e7", size = 4021316, upload-time = "2025-09-24T13:50:23.626Z" }, + { url = "https://files.pythonhosted.org/packages/d3/d4/9b2a9fe6039f9e42ccf2cb3e84f219fd8364b0c3b8e7bbc857b5fbe9c14c/shapely-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ddc759f72b5b2b0f54a7e7cde44acef680a55019eb52ac63a7af2cf17cb9cd2", size = 4178586, upload-time = "2025-09-24T13:50:25.443Z" }, + { url = "https://files.pythonhosted.org/packages/16/f6/9840f6963ed4decf76b08fd6d7fed14f8779fb7a62cb45c5617fa8ac6eab/shapely-2.1.2-cp311-cp311-win32.whl", hash = "sha256:2fa78b49485391224755a856ed3b3bd91c8455f6121fee0db0e71cefb07d0ef6", size = 1543961, upload-time = "2025-09-24T13:50:26.968Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/1e/3f8ea46353c2a33c1669eb7327f9665103aa3a8dfe7f2e4ef714c210b2c2/shapely-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:c64d5c97b2f47e3cd9b712eaced3b061f2b71234b3fc263e0fcf7d889c6559dc", size = 1722856, upload-time = "2025-09-24T13:50:28.497Z" }, + { url = "https://files.pythonhosted.org/packages/24/c0/f3b6453cf2dfa99adc0ba6675f9aaff9e526d2224cbd7ff9c1a879238693/shapely-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fe2533caae6a91a543dec62e8360fe86ffcdc42a7c55f9dfd0128a977a896b94", size = 1833550, upload-time = "2025-09-24T13:50:30.019Z" }, + { url = "https://files.pythonhosted.org/packages/86/07/59dee0bc4b913b7ab59ab1086225baca5b8f19865e6101db9ebb7243e132/shapely-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ba4d1333cc0bc94381d6d4308d2e4e008e0bd128bdcff5573199742ee3634359", size = 1643556, upload-time = "2025-09-24T13:50:32.291Z" }, + { url = "https://files.pythonhosted.org/packages/26/29/a5397e75b435b9895cd53e165083faed5d12fd9626eadec15a83a2411f0f/shapely-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0bd308103340030feef6c111d3eb98d50dc13feea33affc8a6f9fa549e9458a3", size = 2988308, upload-time = "2025-09-24T13:50:33.862Z" }, + { url = "https://files.pythonhosted.org/packages/b9/37/e781683abac55dde9771e086b790e554811a71ed0b2b8a1e789b7430dd44/shapely-2.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1e7d4d7ad262a48bb44277ca12c7c78cb1b0f56b32c10734ec9a1d30c0b0c54b", size = 3099844, upload-time = "2025-09-24T13:50:35.459Z" }, + { url = "https://files.pythonhosted.org/packages/d8/f3/9876b64d4a5a321b9dc482c92bb6f061f2fa42131cba643c699f39317cb9/shapely-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e9eddfe513096a71896441a7c37db72da0687b34752c4e193577a145c71736fc", size = 3988842, upload-time = "2025-09-24T13:50:37.478Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/a0/704c7292f7014c7e74ec84eddb7b109e1fbae74a16deae9c1504b1d15565/shapely-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:980c777c612514c0cf99bc8a9de6d286f5e186dcaf9091252fcd444e5638193d", size = 4152714, upload-time = "2025-09-24T13:50:39.9Z" }, + { url = "https://files.pythonhosted.org/packages/53/46/319c9dc788884ad0785242543cdffac0e6530e4d0deb6c4862bc4143dcf3/shapely-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9111274b88e4d7b54a95218e243282709b330ef52b7b86bc6aaf4f805306f454", size = 1542745, upload-time = "2025-09-24T13:50:41.414Z" }, + { url = "https://files.pythonhosted.org/packages/ec/bf/cb6c1c505cb31e818e900b9312d514f381fbfa5c4363edfce0fcc4f8c1a4/shapely-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:743044b4cfb34f9a67205cee9279feaf60ba7d02e69febc2afc609047cb49179", size = 1722861, upload-time = "2025-09-24T13:50:43.35Z" }, ] [[package]] @@ -4185,28 +12838,28 @@ wheels = [ [[package]] name = "siphash24" -version = "1.7" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0e/be/f0a0ffbb00c51c5633b41459b5ce9b017c025a9256b4403e648c18e70850/siphash24-1.7.tar.gz", hash = "sha256:6e90fee5f199ea25b4e7303646b31872a437174fe885a93dbd4cf7784eb48164", size = 19801, upload-time = "2024-10-15T13:41:51.924Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/d7/4aa16f020c999f0d15e0edfddf805958a77d3c2f57b7505c8c98af6695a6/siphash24-1.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac574867427640d531e7d034e974986ca3ab612699d7c989757e411ef75ec13a", size = 80161, upload-time = "2024-10-15T13:41:05.334Z" }, - { url = "https://files.pythonhosted.org/packages/e0/af/75db98d45694928d586c46cb8d38b4ba056e9ad22520db3f5fe3286e756e/siphash24-1.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:69f50254c7aa6e4c749b7003d70a146e4d24dffa8393a54d288862944b180c83", size = 75233, upload-time = "2024-10-15T13:41:06.549Z" }, - { url = 
"https://files.pythonhosted.org/packages/a7/28/ee5bc61bf0d834cb31bd41f4f6f56ccfd9eeda25e0522e9722de4700a0de/siphash24-1.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cd17d38233a7a1db1f9e8e5043b6035ed1000477ceda30dc3d1a6c5d13e92ed", size = 101148, upload-time = "2024-10-15T13:41:08.832Z" }, - { url = "https://files.pythonhosted.org/packages/43/ee/b225619e0e6889ee3a71175a966ba5d1b84e4fe6cf7c85b5756fbbf3892a/siphash24-1.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db372dc1b242f7fa08c42cdf4e5257a9ac416e6ad42c6d7ee96862be36f59f1b", size = 106023, upload-time = "2024-10-15T13:41:10.302Z" }, - { url = "https://files.pythonhosted.org/packages/e6/5b/adc0483bd11cff6c3eda2d9ecac670d04b97655cc79d4593def2bac832c0/siphash24-1.7-cp310-cp310-win32.whl", hash = "sha256:e72b218d949e954b4e4bae6d8b58fa687ba81f844c9ff5edaef1a3facafc6b46", size = 68073, upload-time = "2024-10-15T13:41:11.983Z" }, - { url = "https://files.pythonhosted.org/packages/41/93/918b04eb6fa5a6d0099067d1d83d679f4709a6014f9f4bc43edef7e1b5a7/siphash24-1.7-cp310-cp310-win_amd64.whl", hash = "sha256:b6cadb06d4a4d305ff8c5f95cab03ed611ae19fe292c77371ee8b852f6075458", size = 79909, upload-time = "2024-10-15T13:41:13.72Z" }, - { url = "https://files.pythonhosted.org/packages/4e/67/4ffd23a848739966e1b314ef99f6410035bccee00be14261313787b8f506/siphash24-1.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:de75488e93f1cd12c8d5004efd1ebd958c0265205a9d73e8dd8b071900838841", size = 80493, upload-time = "2024-10-15T13:41:14.727Z" }, - { url = "https://files.pythonhosted.org/packages/56/bd/ec198a8c7aef65e967ae84f633bd9950d784c9e527d738c9a3e4bccc34a5/siphash24-1.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffca9908450f9f346e97a223185fcd16217d67d84c6f246f3080c4224f41a514", size = 75350, upload-time = "2024-10-15T13:41:16.262Z" }, - { url = 
"https://files.pythonhosted.org/packages/50/5a/77838c916bd15addfc2e51286db4c442cb12e25eb4f8d296c394c2280240/siphash24-1.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8ff44ce166452993fea267ea1b2fd089d8e7f103b13d360da441f12b0df121d", size = 100567, upload-time = "2024-10-15T13:41:17.435Z" }, - { url = "https://files.pythonhosted.org/packages/f0/aa/736a0a2efae9a6f69ac1ee4d28c2274fcad2150349fac752d6c525c4e06e/siphash24-1.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4062548dcb1eef13bbe0356d6f8675bfe4571ef38d7103445daa82ba167240d1", size = 105630, upload-time = "2024-10-15T13:41:18.578Z" }, - { url = "https://files.pythonhosted.org/packages/79/52/1afbd70142d3db093d49197e3abe15ca2f1a14678299327ba776944b4771/siphash24-1.7-cp311-cp311-win32.whl", hash = "sha256:7b4ea29376b688fbcc3d25707c15a9dfe7b4ebbc4322878d75bb77e199210a39", size = 67648, upload-time = "2024-10-15T13:41:19.606Z" }, - { url = "https://files.pythonhosted.org/packages/b5/1d/bedcd04c2d1d199c9f6b3e61a6caae0e17257696c9f49594e49856b17a99/siphash24-1.7-cp311-cp311-win_amd64.whl", hash = "sha256:ec06104e6ef1e512ee30f1b8aeae2b83c0f55f12a94042f0df5a87d43a1f4c52", size = 80046, upload-time = "2024-10-15T13:41:20.654Z" }, - { url = "https://files.pythonhosted.org/packages/3e/62/93e552af9535a416f684327f870143ee42fc9e816091672467cdfd62cce6/siphash24-1.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:76a64ff0cdd192e4d0a391956d9b121c56ed56e773c5ab7eb7c3e035fd16e8cb", size = 82084, upload-time = "2024-10-15T13:41:21.776Z" }, - { url = "https://files.pythonhosted.org/packages/59/3e/b0791ab53aa9ac191b71a021eab2e75baa7c27d7feb7ec148d7961d148ba/siphash24-1.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:49ca649bc7437d614f758891deade3b187832792a853269219e77f10509f82fe", size = 76233, upload-time = "2024-10-15T13:41:22.787Z" }, - { url = 
"https://files.pythonhosted.org/packages/29/4c/4c1b809bf302e9b60f3ec09ba115b2a4ac1ff6755735ee8884924fcdb45e/siphash24-1.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc37dd0aed23f76bd257fbd2953fd5d954b329d7463c6ff57263a2699c52dde6", size = 98188, upload-time = "2024-10-15T13:41:24.327Z" }, - { url = "https://files.pythonhosted.org/packages/96/bf/e6b49f8ff88130bd224f291ea77d30fdde4df5f6572c519aca5d8fc8a27c/siphash24-1.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eea490a200891905856b6ad0f9c56d4ec787876220bcb34c49441b2566b97887", size = 102946, upload-time = "2024-10-15T13:41:25.633Z" }, - { url = "https://files.pythonhosted.org/packages/3d/75/45c831626013950fb2ea715c218c3397e5cf2328a67208bf5d8ff69aa9e6/siphash24-1.7-cp312-cp312-win32.whl", hash = "sha256:69eb8c2c112a738875bb283cd53ef5e86874bc5aed17f3020b38e9174208fb79", size = 68323, upload-time = "2024-10-15T13:41:27.349Z" }, - { url = "https://files.pythonhosted.org/packages/e0/d3/39190c40a68defd19b99c1082dd7455543a52283803bfa111b0e45fae968/siphash24-1.7-cp312-cp312-win_amd64.whl", hash = "sha256:7459569ea4669b6feeaf7d299fc5157cc5c69ca1231dc0decb7a7da2397c782e", size = 81000, upload-time = "2024-10-15T13:41:28.364Z" }, +version = "1.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/67/a2/e049b6fccf7a94bd1b2f68b3059a7d6a7aea86a808cac80cb9ae71ab6254/siphash24-1.8.tar.gz", hash = "sha256:aa932f0af4a7335caef772fdaf73a433a32580405c41eb17ff24077944b0aa97", size = 19946, upload-time = "2025-09-02T20:42:04.856Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/0a/b98666b7089b35143f27ea4e03eddd9da1f117073c0ca01d96bfbf01885e/siphash24-1.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:53aba67b3437d7b56d85ad77879dfe314094f687df1de746fa7c6f5b3f6c1436", size = 76704, upload-time = "2025-09-02T20:40:53.19Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/ed/e919afa4769a93ea44c052e0cbc187c5f2c2bcae59018729c60e30cbe0d2/siphash24-1.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c28925b79954244c8e36ced9b8e88b1cb2d0919baf2b92ef7e8b8f96fd274aa", size = 74196, upload-time = "2025-09-02T20:40:54.531Z" }, + { url = "https://files.pythonhosted.org/packages/fc/c4/1f206db82f27b4e91528678c9ec21ae558e51aadce43ae5bf4ed2da624cb/siphash24-1.8-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a665c1d8ae3a46fdec7c35db69823a0247b35957c4686c6b14156d5dc4ed8920", size = 99433, upload-time = "2025-09-02T20:40:55.761Z" }, + { url = "https://files.pythonhosted.org/packages/c6/9a/cf46eb22a351eeaa06fc1b01984b9feb55d27f878c74ba3f62b8944849de/siphash24-1.8-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b9b15af0db3ff357b432c7e51c7cc1a27891aa1b341cc2a2fc1764e3fb5e605", size = 103131, upload-time = "2025-09-02T20:40:57.044Z" }, + { url = "https://files.pythonhosted.org/packages/c2/74/c811dab8b6c4968f321f2f01e26733f2854a3c5ac75fa8feb99d796fb190/siphash24-1.8-cp310-cp310-win32.whl", hash = "sha256:6fd22c0182518c7b17a9be35128c5b00dbb810751de73d9ab85e39494effab0b", size = 62597, upload-time = "2025-09-02T20:40:58.582Z" }, + { url = "https://files.pythonhosted.org/packages/dd/02/be6d3c614425371bffc6918b15b27f3839a4b168454b25c6a26da8e9bfcc/siphash24-1.8-cp310-cp310-win_amd64.whl", hash = "sha256:876ed2507a9573c663b1f6deb29b5ccca41e5a5099f848ed18272e709e6848ca", size = 77313, upload-time = "2025-09-02T20:40:59.742Z" }, + { url = "https://files.pythonhosted.org/packages/82/23/f53f5bd8866c6ea3abe434c9f208e76ea027210d8b75cd0e0dc849661c7a/siphash24-1.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4662ac616bce4d3c9d6003a0d398e56f8be408fc53a166b79fad08d4f34268e", size = 76930, upload-time = "2025-09-02T20:41:00.869Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/25/aebf246904424a06e7ffb7a40cfa9ea9e590ea0fac82e182e0f5d1f1d7ef/siphash24-1.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:53d6bed0951a99c6d2891fa6f8acfd5ca80c3e96c60bcee99f6fa01a04773b1c", size = 74315, upload-time = "2025-09-02T20:41:02.38Z" }, + { url = "https://files.pythonhosted.org/packages/59/3f/7010407c3416ef052d46550d54afb2581fb247018fc6500af8c66669eff2/siphash24-1.8-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d114c03648630e9e07dac2fe95442404e4607adca91640d274ece1a4fa71123e", size = 99756, upload-time = "2025-09-02T20:41:03.902Z" }, + { url = "https://files.pythonhosted.org/packages/d4/9f/09c734833e69badd7e3faed806b4372bd6564ae0946bd250d5239885914f/siphash24-1.8-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:88c1a55ff82b127c5d3b96927a430d8859e6a98846a5b979833ac790682dd91b", size = 104044, upload-time = "2025-09-02T20:41:05.505Z" }, + { url = "https://files.pythonhosted.org/packages/24/30/56a26d9141a34433da221f732599e2b23d2d70a966c249a9f00feb9a2915/siphash24-1.8-cp311-cp311-win32.whl", hash = "sha256:9430255e6a1313470f52c07c4a4643c451a5b2853f6d4008e4dda05cafb6ce7c", size = 62196, upload-time = "2025-09-02T20:41:07.299Z" }, + { url = "https://files.pythonhosted.org/packages/47/b2/11b0ae63fd374652544e1b12f72ba2cc3fe6c93c1483bd8ff6935b0a8a4b/siphash24-1.8-cp311-cp311-win_amd64.whl", hash = "sha256:1e4b37e4ef0b4496169adce2a58b6c3f230b5852dfa5f7ad0b2d664596409e47", size = 77162, upload-time = "2025-09-02T20:41:08.878Z" }, + { url = "https://files.pythonhosted.org/packages/7f/82/ce3545ce8052ac7ca104b183415a27ec3335e5ed51978fdd7b433f3cfe5b/siphash24-1.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:df5ed437c6e6cc96196b38728e57cd30b0427df45223475a90e173f5015ef5ba", size = 78136, upload-time = "2025-09-02T20:41:10.083Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/88/896c3b91bc9deb78c415448b1db67343917f35971a9e23a5967a9d323b8a/siphash24-1.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f4ef78abdf811325c7089a35504df339c48c0007d4af428a044431d329721e56", size = 74588, upload-time = "2025-09-02T20:41:11.251Z" }, + { url = "https://files.pythonhosted.org/packages/12/fd/8dad3f5601db485ba862e1c1f91a5d77fb563650856a6708e9acb40ee53c/siphash24-1.8-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:065eff55c4fefb3a29fd26afb2c072abf7f668ffd53b91d41f92a1c485fcbe5c", size = 98655, upload-time = "2025-09-02T20:41:12.45Z" }, + { url = "https://files.pythonhosted.org/packages/e3/cc/e0c352624c1f2faad270aeb5cce6e173977ef66b9b5e918aa6f32af896bf/siphash24-1.8-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac6fa84ebfd47677262aa0bcb0f5a70f796f5fc5704b287ee1b65a3bd4fb7a5d", size = 103217, upload-time = "2025-09-02T20:41:13.746Z" }, + { url = "https://files.pythonhosted.org/packages/5b/f6/0b1675bea4d40affcae642d9c7337702a4138b93c544230280712403e968/siphash24-1.8-cp312-cp312-win32.whl", hash = "sha256:6582f73615552ca055e51e03cb02a28e570a641a7f500222c86c2d811b5037eb", size = 63114, upload-time = "2025-09-02T20:41:14.972Z" }, + { url = "https://files.pythonhosted.org/packages/3d/39/afefef85d72ed8b5cf1aa9283f712e3cd43c9682fabbc809dec54baa8452/siphash24-1.8-cp312-cp312-win_amd64.whl", hash = "sha256:44ea6d794a7cbe184e1e1da2df81c5ebb672ab3867935c3e87c08bb0c2fa4879", size = 76232, upload-time = "2025-09-02T20:41:16.112Z" }, ] [[package]] @@ -4220,35 +12873,36 @@ wheels = [ [[package]] name = "smmap" -version = "5.0.2" +version = "5.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/44/cd/a040c4b3119bbe532e5b0732286f805445375489fceaec1f48306068ee3b/smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5", 
size = 22329, upload-time = "2025-01-02T07:14:40.909Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/ea/49c993d6dfdd7338c9b1000a0f36817ed7ec84577ae2e52f890d1a4ff909/smmap-5.0.3.tar.gz", hash = "sha256:4d9debb8b99007ae47165abc08670bd74cb74b5227dda7f643eccc4e9eb5642c", size = 22506, upload-time = "2026-03-09T03:43:26.1Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e", size = 24303, upload-time = "2025-01-02T07:14:38.724Z" }, + { url = "https://files.pythonhosted.org/packages/c1/d4/59e74daffcb57a07668852eeeb6035af9f32cbfd7a1d2511f17d2fe6a738/smmap-5.0.3-py3-none-any.whl", hash = "sha256:c106e05d5a61449cf6ba9a1e650227ecfb141590d2a98412103ff35d89fc7b2f", size = 24390, upload-time = "2026-03-09T03:43:24.361Z" }, ] [[package]] name = "sounddevice" -version = "0.5.2" +version = "0.5.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/91/a6/91e9f08ed37c7c9f56b5227c6aea7f2ae63ba2d59520eefb24e82cbdd589/sounddevice-0.5.2.tar.gz", hash = "sha256:c634d51bd4e922d6f0fa5e1a975cc897c947f61d31da9f79ba7ea34dff448b49", size = 53150, upload-time = "2025-05-16T18:12:27.339Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/f9/2592608737553638fca98e21e54bfec40bf577bb98a61b2770c912aab25e/sounddevice-0.5.5.tar.gz", hash = "sha256:22487b65198cb5bf2208755105b524f78ad173e5ab6b445bdab1c989f6698df3", size = 143191, upload-time = "2026-01-23T18:36:43.529Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/75/2d/582738fc01352a5bc20acac9221e58538365cecb3bb264838f66419df219/sounddevice-0.5.2-py3-none-any.whl", hash = "sha256:82375859fac2e73295a4ab3fc60bd4782743157adc339561c1f1142af472f505", size = 32450, upload-time = "2025-05-16T18:12:21.919Z" }, - { url = 
"https://files.pythonhosted.org/packages/3f/6f/e3dd751face4fcb5be25e8abba22f25d8e6457ebd7e9ed79068b768dc0e5/sounddevice-0.5.2-py3-none-macosx_10_6_x86_64.macosx_10_6_universal2.whl", hash = "sha256:943f27e66037d41435bdd0293454072cdf657b594c9cde63cd01ee3daaac7ab3", size = 108088, upload-time = "2025-05-16T18:12:23.146Z" }, - { url = "https://files.pythonhosted.org/packages/45/0b/bfad79af0b380aa7c0bfe73e4b03e0af45354a48ad62549489bd7696c5b0/sounddevice-0.5.2-py3-none-win32.whl", hash = "sha256:3a113ce614a2c557f14737cb20123ae6298c91fc9301eb014ada0cba6d248c5f", size = 312665, upload-time = "2025-05-16T18:12:24.726Z" }, - { url = "https://files.pythonhosted.org/packages/e1/3e/61d88e6b0a7383127cdc779195cb9d83ebcf11d39bc961de5777e457075e/sounddevice-0.5.2-py3-none-win_amd64.whl", hash = "sha256:e18944b767d2dac3771a7771bdd7ff7d3acd7d334e72c4bedab17d1aed5dbc22", size = 363808, upload-time = "2025-05-16T18:12:26Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0a/478e441fd049002cf308520c0d62dd8333e7c6cc8d997f0dda07b9fbcc46/sounddevice-0.5.5-py3-none-any.whl", hash = "sha256:30ff99f6c107f49d25ad16a45cacd8d91c25a1bcdd3e81a206b921a3a6405b1f", size = 32807, upload-time = "2026-01-23T18:36:35.649Z" }, + { url = "https://files.pythonhosted.org/packages/56/f9/c037c35f6d0b6bc3bc7bfb314f1d6f1f9a341328ef47cd63fc4f850a7b27/sounddevice-0.5.5-py3-none-macosx_10_6_x86_64.macosx_10_6_universal2.whl", hash = "sha256:05eb9fd6c54c38d67741441c19164c0dae8ce80453af2d8c4ad2e7823d15b722", size = 108557, upload-time = "2026-01-23T18:36:37.41Z" }, + { url = "https://files.pythonhosted.org/packages/88/a1/d19dd9889cd4bce2e233c4fac007cd8daaf5b9fe6e6a5d432cf17be0b807/sounddevice-0.5.5-py3-none-win32.whl", hash = "sha256:1234cc9b4c9df97b6cbe748146ae0ec64dd7d6e44739e8e42eaa5b595313a103", size = 317765, upload-time = "2026-01-23T18:36:39.047Z" }, + { url = 
"https://files.pythonhosted.org/packages/c3/0e/002ed7c4c1c2ab69031f78989d3b789fee3a7fba9e586eb2b81688bf4961/sounddevice-0.5.5-py3-none-win_amd64.whl", hash = "sha256:cfc6b2c49fb7f555591c78cb8ecf48d6a637fd5b6e1db5fec6ed9365d64b3519", size = 365324, upload-time = "2026-01-23T18:36:40.496Z" }, + { url = "https://files.pythonhosted.org/packages/4e/39/a61d4b83a7746b70d23d9173be688c0c6bfc7173772344b7442c2c155497/sounddevice-0.5.5-py3-none-win_arm64.whl", hash = "sha256:3861901ddd8230d2e0e8ae62ac320cdd4c688d81df89da036dcb812f757bb3e6", size = 317115, upload-time = "2026-01-23T18:36:42.235Z" }, ] [[package]] name = "soupsieve" -version = "2.8.1" +version = "2.8.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/89/23/adf3796d740536d63a6fbda113d07e60c734b6ed5d3058d1e47fc0495e47/soupsieve-2.8.1.tar.gz", hash = "sha256:4cf733bc50fa805f5df4b8ef4740fc0e0fa6218cf3006269afd3f9d6d80fd350", size = 117856, upload-time = "2025-12-18T13:50:34.655Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/ae/2d9c981590ed9999a0d91755b47fc74f74de286b0f5cee14c9269041e6c4/soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349", size = 118627, upload-time = "2026-01-20T04:27:02.457Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/48/f3/b67d6ea49ca9154453b6d70b34ea22f3996b9fa55da105a79d8732227adc/soupsieve-2.8.1-py3-none-any.whl", hash = "sha256:a11fe2a6f3d76ab3cf2de04eb339c1be5b506a8a47f2ceb6d139803177f85434", size = 36710, upload-time = "2025-12-18T13:50:33.267Z" }, + { url = "https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95", size = 37016, upload-time = "2026-01-20T04:27:01.012Z" }, ] [[package]] @@ -4267,22 +12921,27 @@ wheels = [ [[package]] name = "supervision" -version = "0.26.0" 
+version = "0.27.0.post2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "defusedxml" }, { name = "matplotlib" }, - { name = "numpy" }, - { name = "opencv-python" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' 
and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') 
or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "opencv-python", version = "4.11.0.86", source = { registry = 
"https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or 
(extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "opencv-python", version = "4.13.0.92", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-falcon-perception' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "pillow" }, + { name = "pydeprecate" }, { name = "pyyaml" }, { name = "requests" }, - { name = "scipy" }, + { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "tqdm" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9b/e9/502cb036a3aba19a4278f68a007958687ddb628b97f1646d4689b831525a/supervision-0.26.0.tar.gz", hash = "sha256:03801068ab55f75be10142772c3b1f68826b3d7af98ae341dfe6a474af299fcc", size = 175968, upload-time = "2025-07-16T00:40:52.826Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c5/9c/46d056999c5ea6cf8dc10451163601867247bc1378b69b64c987bd44a336/supervision-0.27.0.post2.tar.gz", hash = "sha256:be1283ef2000a593a4568d83289344af95befb8b847a954d5f099a43cb253dc9", size = 186135, upload-time = "2026-03-14T08:12:10.973Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/a4/3265c5ae60f278c63ecd3fd52528594e8c70c66f15d8d92ff930f47aca50/supervision-0.26.0-py3-none-any.whl", hash = "sha256:dfece5805f8511817f140de2a94a5bcf55b3bb329a2ed6e2b00fb11218301ea6", size = 206739, upload-time = "2025-07-16T00:40:51.007Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/34/a138692ced038c135d1a45b73a433c89c57cad2eac9dd55bb38d9de89783/supervision-0.27.0.post2-py3-none-any.whl", hash = "sha256:713a341a563774d6a4b017787baa94f8698a9ea1de57155093d70de3a5007941", size = 217448, upload-time = "2026-03-14T08:12:09.279Z" }, ] [[package]] @@ -4290,18 +12949,24 @@ name = "sympy" version = "1.13.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and sys_platform == 'darwin'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", ] dependencies = [ - { name = "mpmath", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "mpmath", marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ca/99/5a5b6f19ff9f083671ddf7b9632028436167cd3d33e11015754e41b249a4/sympy-1.13.1.tar.gz", hash = "sha256:9cebf7e04ff162015ce31c9c6c9144daa34a93bd082f54fd8f12deca4f47515f", size = 7533040, upload-time = "2024-07-19T09:26:51.238Z" } wheels = [ @@ -4313,18 +12978,810 @@ name = "sympy" version = "1.14.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", - "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 
'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and sys_platform == 'darwin'", - "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == 
'3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", 
+ "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and 
sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", 
+ "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == 
'3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' 
and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine 
!= 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and 
sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' 
and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == 
'3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' 
and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == 
'3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine 
!= 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 
's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= 
'3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra 
!= 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine 
!= 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 
'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine 
!= 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and 
sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", 
+ "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 
's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
< '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", ] dependencies = [ - { name = "mpmath" }, + { name = "mpmath", marker = "extra == 'extra-16-inference-models-falcon-perception' or extra == 'extra-16-inference-models-torch-cpu' or extra == 'extra-16-inference-models-torch-cu118' or extra != 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash = 
"sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921, upload-time = "2025-04-27T18:05:01.611Z" } wheels = [ @@ -4333,89 +13790,89 @@ wheels = [ [[package]] name = "tensorrt-cu12" -version = "10.12.0.36" +version = "10.16.1.11" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "tensorrt-cu12-bindings", marker = "sys_platform != 'darwin' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "tensorrt-cu12-libs", marker = "sys_platform != 'darwin' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "tensorrt-cu12-bindings" }, + { name = "tensorrt-cu12-libs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a2/5f/be05d7a6683e06333a3ba195e5d68dc602294eb21d4bbbe8c01941bb614b/tensorrt_cu12-10.12.0.36.tar.gz", hash = "sha256:aedeee0195c042592ac6b0536b19bc8cdbb1a548f35e09d24fbe78e1c76217c5", size = 18224, upload-time = "2025-06-12T21:54:43.474Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d5/a1/4841ac6b5e6d1d4a7f289013d359ed8479ec186a946e84446822cd4f5cc5/tensorrt_cu12-10.16.1.11.tar.gz", hash = "sha256:a044fe4a2e2e51fc205d806534102529c6f744402350b7730d221c99ffa6d269", size = 17921, upload-time = "2026-04-07T19:42:03.606Z" } [[package]] name = "tensorrt-cu12-bindings" -version = "10.12.0.36" +version = "10.16.1.11" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/35/b2/3c77a90a9c4f5b5ec40fa5b5227595f880a42d4e6f19e0868531b85b2442/tensorrt_cu12_bindings-10.12.0.36-cp310-none-manylinux_2_28_x86_64.whl", hash = "sha256:7ecdb6fc2555caed7d4fbbd8158ed7ced64e230c125484f62a5369c40dcc70e5", size = 1172872, upload-time = "2025-06-12T21:44:03.397Z" }, - { url = "https://files.pythonhosted.org/packages/8b/8a/b26adc5c549472cf8fc1ede8702f037e1da8f1755ac5c6f5419ee4703a78/tensorrt_cu12_bindings-10.12.0.36-cp310-none-manylinux_2_31_aarch64.whl", hash = "sha256:d8548ab5976ca5c91279c68ee77f4c892e03460709cfa3fbd2a22aa8123cb731", size = 1242157, upload-time = "2025-06-12T21:31:02.294Z" }, - { url = "https://files.pythonhosted.org/packages/57/11/ebd1abd00bd405c9aca17c4fdd164e9b4ca6091204f2124e4ed3ac8c7d55/tensorrt_cu12_bindings-10.12.0.36-cp310-none-win_amd64.whl", hash = 
"sha256:71be162a77ec9d5165d8c6ffad95179882e2c8604a68d7e21933258c182a8c0a", size = 869257, upload-time = "2025-06-12T21:55:35.386Z" }, - { url = "https://files.pythonhosted.org/packages/2c/52/d46ecfbeecfeff28b3b58502c3edf1ac03e55042408eddb75eff76c6f382/tensorrt_cu12_bindings-10.12.0.36-cp311-none-manylinux_2_28_x86_64.whl", hash = "sha256:58cf45605bb330e86f8ad49bc8997ed68cfdf5b09da229534fb7f84aa3fe5bf4", size = 1173379, upload-time = "2025-06-12T21:43:46.068Z" }, - { url = "https://files.pythonhosted.org/packages/99/99/6faa695b05413bd1c5358d11fb3ef836c8ff8b7e0c991d1060621d2d7163/tensorrt_cu12_bindings-10.12.0.36-cp311-none-manylinux_2_31_aarch64.whl", hash = "sha256:ae0866a89caaeada1c16776de85413a523f78f53b1fd83f1b903c39eed264d82", size = 1242340, upload-time = "2025-06-12T21:31:32.275Z" }, - { url = "https://files.pythonhosted.org/packages/ab/68/e79bb2748434d81bc77afe2e31d3878f0f0622f7d02bf36b58529ec3b78d/tensorrt_cu12_bindings-10.12.0.36-cp311-none-win_amd64.whl", hash = "sha256:9d6687f056ce603c6a97235fd4291330c06410ac5b899db412597a06720b02bc", size = 868166, upload-time = "2025-06-12T21:56:27.052Z" }, - { url = "https://files.pythonhosted.org/packages/df/40/d6d0599e1b3ac79a3583f03eec951a4c4c91c1a600bcc779e29a3012b359/tensorrt_cu12_bindings-10.12.0.36-cp312-none-manylinux_2_28_x86_64.whl", hash = "sha256:fb3a2ce96c7472a46bbee2030ce6a54fd6a32deda401c1c67d9de057550e0171", size = 1176288, upload-time = "2025-06-12T21:43:29.001Z" }, - { url = "https://files.pythonhosted.org/packages/da/0c/caee237aaa9edc9bfdb04857c3f96313a98ee2e3b9a6a9949b42afc724f5/tensorrt_cu12_bindings-10.12.0.36-cp312-none-manylinux_2_31_aarch64.whl", hash = "sha256:f5128b8b2a379e65c09745ba97df58abf3a418cbfd6508d37f76121d9bdd3bc8", size = 1220840, upload-time = "2025-06-12T21:30:03.028Z" }, - { url = "https://files.pythonhosted.org/packages/c1/c6/31a1285e214c71e33a29d2cbd9c9418cbbcbd255b3f6d62b0c24cb064580/tensorrt_cu12_bindings-10.12.0.36-cp312-none-win_amd64.whl", hash = 
"sha256:408af91113c22f58f2f08404bfc1548baa8d78ce6126bd543acdc9d5819662ff", size = 871280, upload-time = "2025-06-12T21:55:00.025Z" }, + { url = "https://files.pythonhosted.org/packages/29/10/104667d40f08f0e8f3bf45a958deedc14fb49711e2d76ddb27bda86f19f3/tensorrt_cu12_bindings-10.16.1.11-cp310-none-manylinux_2_28_x86_64.whl", hash = "sha256:c1b6a156db4eab526360f5dba375e5c33e1275e3f36b0a7d375febb3db6202fc", size = 1346282, upload-time = "2026-04-07T19:45:01.555Z" }, + { url = "https://files.pythonhosted.org/packages/98/b2/17a49e8146b9ef4d5c3551674f482d98c6ab54cdf72af00d2c3ba30952fd/tensorrt_cu12_bindings-10.16.1.11-cp310-none-win_amd64.whl", hash = "sha256:10cecfe6ea2e9b8ba11a5e4c0f2ae104ea3c5d22d2ff7799d7f87569cf936942", size = 963353, upload-time = "2026-04-07T19:46:54.038Z" }, + { url = "https://files.pythonhosted.org/packages/88/9a/0b148ab0e3019d0a43e0a2404ff52ac0b3c230dbf776f4aed8960c647cc9/tensorrt_cu12_bindings-10.16.1.11-cp311-none-manylinux_2_28_x86_64.whl", hash = "sha256:150ef2b672487e29a8ea916353467c3e199994fb33589167e3641a928e243aa7", size = 1346362, upload-time = "2026-04-07T19:48:43.236Z" }, + { url = "https://files.pythonhosted.org/packages/9a/f8/a264f4837ac8196ba4764a989e3526edc72ac83799c3b7659208e57b70dd/tensorrt_cu12_bindings-10.16.1.11-cp311-none-win_amd64.whl", hash = "sha256:425382e26ea149ac4a3cba3ae9b1abb4187a14c0cf9add58206d9bb1a236aa26", size = 963754, upload-time = "2026-04-07T19:46:28.672Z" }, + { url = "https://files.pythonhosted.org/packages/81/93/8364c6e89d9a0ad596ddbefe1545e334c55bf1e85491d8cd3af6f5853572/tensorrt_cu12_bindings-10.16.1.11-cp312-none-manylinux_2_28_x86_64.whl", hash = "sha256:8e339335e7255b3fb3954eca61b4acbbdfcf7b28505bd711914519647ba310c8", size = 1341247, upload-time = "2026-04-07T19:49:05.057Z" }, + { url = "https://files.pythonhosted.org/packages/82/50/2153496271c772e9fb6e46de7f00c7d5bd08fea33bdf7a38d60521f5a396/tensorrt_cu12_bindings-10.16.1.11-cp312-none-win_amd64.whl", hash = 
"sha256:e4d7105b9700987b4fd9362adacc0dfa7bee3025c8ad147a86a4316b6362a85b", size = 964884, upload-time = "2026-04-07T19:47:17.234Z" }, ] [[package]] name = "tensorrt-cu12-libs" -version = "10.12.0.36" +version = "10.16.1.11" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-cuda-runtime-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform 
== 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cuda-runtime-cu12", version = "12.8.57", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cuda-runtime-cu12", version = "12.9.79", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (platform_machine != 'aarch64' and extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (platform_machine != 'aarch64' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or 
(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or 
(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128')" }, + { name = "nvidia-cuda-runtime-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or 
(sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' 
and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or 
(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cuda-runtime-cu12", version = "12.8.90", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cuda-runtime-cu12", version = "12.9.79", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1e/bd/31d7dbd2d23a3558f7f4b3447586b4ef9141a0f3a0748c41c730e99cf6a6/tensorrt_cu12_libs-10.12.0.36.tar.gz", hash = "sha256:d26af485ad452599016bde631f4cd223b97f240afb647162ab0c5e8f89708934", size = 709, upload-time = "2025-06-12T21:22:47.972Z" } +sdist = { url = "https://files.pythonhosted.org/packages/56/9b/8d5b85c7cb7feaead7d462e81a4592a9d4ad3f581798f65ed34b765441f3/tensorrt_cu12_libs-10.16.1.11.tar.gz", hash = "sha256:53b45a3e82bead09638e7a3ddf4b927254b81fa0d924fe3fe594420a09ebf48b", size = 15757, upload-time = "2026-04-07T19:42:58.348Z" } [[package]] name = "tensorrt-lean-cu12" -version = "10.12.0.36" +version = "10.16.1.11" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "tensorrt-lean-cu12-bindings", marker = "sys_platform != 'darwin' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "tensorrt-lean-cu12-libs", marker = "sys_platform != 'darwin' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "tensorrt-lean-cu12-bindings" }, + { name = "tensorrt-lean-cu12-libs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1a/0a/ab186da5277a7f8f4b81d29ac81b69fa1f80ea5b93e340f98e2079ee0ecb/tensorrt_lean_cu12-10.12.0.36.tar.gz", hash = "sha256:f7c898aa073e15863f578df2f09778823c6e7752fb621923437895bfd778b3fd", size = 18240, upload-time = "2025-06-12T21:59:19.087Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/ed/13dd4057958466dd3d551da6ecf40fa4b2993363d2657911d5d58cdcad16/tensorrt_lean_cu12-10.16.1.11.tar.gz", hash = "sha256:54432b02484b6193e0116d902ed0c798ddbb28f54ad460d1f28a8962bb97d221", size = 17946, upload-time = "2026-04-07T19:45:06.303Z" } [[package]] 
name = "tensorrt-lean-cu12-bindings" -version = "10.12.0.36" +version = "10.16.1.11" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/1f/3b8dd113a8d699e2f750f969bb831f764e6cb7e0ad6cfd4283e788c29403/tensorrt_lean_cu12_bindings-10.12.0.36-cp310-none-manylinux_2_28_x86_64.whl", hash = "sha256:f0ca23efe7a14ddca412375cdc473d788a8f6bc217df9d296417dc206b242c05", size = 724246, upload-time = "2025-06-12T21:46:41.409Z" }, - { url = "https://files.pythonhosted.org/packages/5c/f6/5e91418ef7407f8145032cb0f157308e5e829c54487ddfb725fbfd024138/tensorrt_lean_cu12_bindings-10.12.0.36-cp310-none-manylinux_2_31_aarch64.whl", hash = "sha256:ab36c61674c95f5177c8ea8e439cae0a22641b572a4383c10cc433e9920948ff", size = 743013, upload-time = "2025-06-12T21:34:59.815Z" }, - { url = "https://files.pythonhosted.org/packages/4a/6e/34a612a3093cad43fdab60bfdb979c93ae1cb274134b6ffc1c9bfcfb260f/tensorrt_lean_cu12_bindings-10.12.0.36-cp310-none-win_amd64.whl", hash = "sha256:9427a726d4bf273b6e4e1fa847ca5bffb983e7ce2cd7764ddf3282a4db237f68", size = 619431, upload-time = "2025-06-12T21:59:53.001Z" }, - { url = "https://files.pythonhosted.org/packages/16/ff/5892534b5d2fe43a738445558dfb6d7577524a9f13db1b5d1d6e3e7082f0/tensorrt_lean_cu12_bindings-10.12.0.36-cp311-none-manylinux_2_28_x86_64.whl", hash = "sha256:c66351523906f7fe4cffa798f8b2438770fc5aacf838bb0e5bddee0530603b57", size = 723166, upload-time = "2025-06-12T21:47:51.274Z" }, - { url = "https://files.pythonhosted.org/packages/00/0c/3d6c94c0b650944bc4a25aaaa4ee1a0c0c4711483606c38912fc15121985/tensorrt_lean_cu12_bindings-10.12.0.36-cp311-none-manylinux_2_31_aarch64.whl", hash = "sha256:44534893081c349f54fac076cd5cca34b85fffcb06f0fb2f5d66a55a6665fc42", size = 743001, upload-time = "2025-06-12T21:36:00.874Z" }, - { url = 
"https://files.pythonhosted.org/packages/01/2b/8383c384f3ab6cfc2c01395d0ace25e15b258f669050760a28fa2c502f67/tensorrt_lean_cu12_bindings-10.12.0.36-cp311-none-win_amd64.whl", hash = "sha256:a9661d4937a216fd70684a873517639de9e90bb9a2dd5bb6229b00ea1336302a", size = 619710, upload-time = "2025-06-12T22:01:02.055Z" }, - { url = "https://files.pythonhosted.org/packages/c5/5c/50e520ba56d7b5c93f47f3196f19783be739f17c74d5c2e21a1e80e189fb/tensorrt_lean_cu12_bindings-10.12.0.36-cp312-none-manylinux_2_28_x86_64.whl", hash = "sha256:74224e788fb4fabd2765be6d9fd242144bf7eb1f32295aac10bbd388e7268f07", size = 725212, upload-time = "2025-06-12T21:46:58.876Z" }, - { url = "https://files.pythonhosted.org/packages/b6/25/5c497a3a161e58738ad37aadb9bb2862a741ee5cf299b3dba0dc6df33303/tensorrt_lean_cu12_bindings-10.12.0.36-cp312-none-manylinux_2_31_aarch64.whl", hash = "sha256:e54348bba07ae399b3d9ca0343b51247537a0ffefc747a0ab4d668ecd9015aea", size = 731138, upload-time = "2025-06-12T21:36:28.847Z" }, - { url = "https://files.pythonhosted.org/packages/1b/8a/17851c6e1bd1d896f417d2cd33352835a7dace1792a1f5bf62fb27363f3c/tensorrt_lean_cu12_bindings-10.12.0.36-cp312-none-win_amd64.whl", hash = "sha256:36f41fdabebdb213c8a836b33888e9b7a7f15188926c92f520c6fbd2861869a6", size = 623777, upload-time = "2025-06-12T22:00:45.341Z" }, + { url = "https://files.pythonhosted.org/packages/44/82/05a56e5f50042bbb75b02a1551bdbe67f579e0884e0b75787651e59fdb73/tensorrt_lean_cu12_bindings-10.16.1.11-cp310-none-manylinux_2_28_x86_64.whl", hash = "sha256:7f7342a53535b201572c49713546b5c6cb6c089cf4f3f0ae6d40eb096d4e53d8", size = 811082, upload-time = "2026-04-07T19:48:48.518Z" }, + { url = "https://files.pythonhosted.org/packages/2e/6e/fb9a708ba8cf604f28288c4dd6c2ca6fcccd29226a840cc10b048d1902ae/tensorrt_lean_cu12_bindings-10.16.1.11-cp310-none-win_amd64.whl", hash = "sha256:95979ba357752b128ba37305adead19d4462f2c86ef10f1c1fd067677653139f", size = 666211, upload-time = "2026-04-07T19:46:36.606Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/ec/5f3027d29344b67902fcd223e96b1a33b31764a10e4e71109666f663f2b9/tensorrt_lean_cu12_bindings-10.16.1.11-cp311-none-manylinux_2_28_x86_64.whl", hash = "sha256:c5c56343829a5f455c945acd9a05880eaa7b512f27e8bc537e898f003046f0cc", size = 812721, upload-time = "2026-04-07T19:49:36.174Z" }, + { url = "https://files.pythonhosted.org/packages/51/4d/8dd996a01f95f5d5e6f6939fc662e47d7f2c95c0b874edc36aa34f84eb5f/tensorrt_lean_cu12_bindings-10.16.1.11-cp311-none-win_amd64.whl", hash = "sha256:f4919884fc5c9203872bafa7ac0734ce6bb8611acde193f39b900458bd2fcaca", size = 666791, upload-time = "2026-04-07T19:49:55.037Z" }, + { url = "https://files.pythonhosted.org/packages/55/5d/86c05aac1f18d6de2c4b1e7bd17c023316fe990b42369f85b821a8bc008c/tensorrt_lean_cu12_bindings-10.16.1.11-cp312-none-manylinux_2_28_x86_64.whl", hash = "sha256:c9eb55a2d9eb531ac28fc599faa6febf9d0bd582792a2472435444ad770e60d4", size = 812654, upload-time = "2026-04-07T19:46:58.737Z" }, + { url = "https://files.pythonhosted.org/packages/d4/67/4d6d21d14482f5414429e4f357a158c37aff72fc389cb166e29661d8a170/tensorrt_lean_cu12_bindings-10.16.1.11-cp312-none-win_amd64.whl", hash = "sha256:035f556feafffc79f42e3a99bd00f7d34be19dec219c2eab03200174b0e1606f", size = 668866, upload-time = "2026-04-07T19:49:17.137Z" }, ] [[package]] name = "tensorrt-lean-cu12-libs" -version = "10.12.0.36" +version = "10.16.1.11" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-cuda-runtime-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cuda-runtime-cu12", version = "12.8.57", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cuda-runtime-cu12", version = "12.9.79", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (platform_machine != 'aarch64' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or 
(sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or 
(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128')" }, + { name = "nvidia-cuda-runtime-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or 
(sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra 
== 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cuda-runtime-cu12", 
version = "12.8.90", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cuda-runtime-cu12", version = "12.9.79", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (sys_platform == 'darwin' and extra == 
'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128')" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/ae/b88761f3361a691b9e6f7f8579e7317961e1858fd56385c087bda4a24bc6/tensorrt_lean_cu12_libs-10.16.1.11-py3-none-manylinux_2_28_x86_64.whl", hash = 
"sha256:b472f89c55752ea35406b951da629f033e598a522bcb4e472e741c0f1f91eb58", size = 46408436, upload-time = "2026-04-07T19:45:50.118Z" }, + { url = "https://files.pythonhosted.org/packages/4a/e8/274eb67d58a588b4f3cd687abebc05f5d9e0e8fd5db81d4104ad197cfdd0/tensorrt_lean_cu12_libs-10.16.1.11-py3-none-win_amd64.whl", hash = "sha256:4f6b5724d7356928930e1997bea1f287d984aad5aca450d4fa0c393da877b7ea", size = 16937568, upload-time = "2026-04-07T19:46:13.445Z" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/25/13/f623c012b4a5933dd7384ac68631342f89cd659dc67742841687baa26c78/tensorrt_lean_cu12_libs-10.12.0.36.tar.gz", hash = "sha256:3ac037cce0ba06e6bafb2d9b88bf5dba87356a1a11df66754e940cfea15305d8", size = 716, upload-time = "2025-06-12T21:25:02.299Z" } [[package]] name = "tifffile" version = "2025.5.10" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version < '3.11' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", ] dependencies = [ - { name = "numpy", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/44/d0/18fed0fc0916578a4463f775b0fbd9c5fed2392152d039df2fb533bfdd5d/tifffile-2025.5.10.tar.gz", hash = "sha256:018335d34283aa3fd8c263bae5c3c2b661ebc45548fde31504016fcae7bf1103", size = 365290, upload-time = "2025-05-10T19:22:34.386Z" } wheels = [ @@ -4424,51 +13881,661 @@ wheels = [ [[package]] name = "tifffile" -version = "2025.9.9" +version = "2026.3.3" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", - "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", 
+ "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra 
!= 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 
's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == 
'3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra 
!= 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and 
sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", 
+ "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == 
'3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine 
!= 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and 
sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 
's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
>= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' 
and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 
's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra 
!= 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine 
!= 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 
'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
>= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' 
and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and 
sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra 
!= 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra 
!= 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra 
!= 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' 
and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 
'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", ] dependencies = [ - { 
name = "numpy", marker = "python_full_version >= '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < 
'3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or 
(extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') 
or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') 
or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3a/a7/b77bd01f97d72bb70194f036e77f45927978f43017254762c784d7e10f49/tifffile-2025.9.9.tar.gz", hash = "sha256:6cf97ef548970eee9940cf8fc4203e57b4462a72e1e5e7a667ecdeb96113bc5f", size = 369652, upload-time = "2025-09-10T00:02:19.534Z" } 
+sdist = { url = "https://files.pythonhosted.org/packages/c5/cb/2f6d79c7576e22c116352a801f4c3c8ace5957e9aced862012430b62e14f/tifffile-2026.3.3.tar.gz", hash = "sha256:d9a1266bed6f2ee1dd0abde2018a38b4f8b2935cb843df381d70ac4eac5458b7", size = 388745, upload-time = "2026-03-03T19:14:38.134Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/48/c5/0d57e3547add58285f401afbc421bd3ffeddbbd275a2c0b980b9067fda4a/tifffile-2025.9.9-py3-none-any.whl", hash = "sha256:239247551fa10b5679036ee030cdbeb7762bc1b3f11b1ddaaf50759ef8b4eb26", size = 230668, upload-time = "2025-09-10T00:02:17.839Z" }, + { url = "https://files.pythonhosted.org/packages/1a/e4/e804505f87627cd8cdae9c010c47c4485fd8c1ce31a7dd0ab7fcc4707377/tifffile-2026.3.3-py3-none-any.whl", hash = "sha256:e8be15c94273113d31ecb7aa3a39822189dd11c4967e3cc88c178f1ad2fd1170", size = 243960, upload-time = "2026-03-03T19:14:35.808Z" }, ] [[package]] name = "timm" -version = "1.0.15" +version = "1.0.26" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub" }, { name = "pyyaml" }, { name = "safetensors" }, - { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra 
== 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.21.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and extra 
== 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 
'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 
'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = 
"torchvision", version = "0.22.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.22.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" 
}, - { name = "torchvision", version = "0.23.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torchvision", version = "0.23.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') 
or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or 
(extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = 
"https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra 
== 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or 
(extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.21.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.22.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torchvision", version = "0.26.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torchvision", version = "0.26.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bc/0c/66b0f9b4a4cb9ffdac7b52b17b37c7d3c4f75623b469e388b0c6d89b4e88/timm-1.0.15.tar.gz", hash = "sha256:756a3bc30c96565f056e608a9b559daed904617eaadb6be536f96874879b1055", size = 2230258, upload-time = "2025-02-23T05:05:55.959Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/1e/e924b3b2326a856aaf68586f9c52a5fc81ef45715eca408393b68c597e0e/timm-1.0.26.tar.gz", hash = "sha256:f66f082f2f381cf68431c22714c8b70f723837fa2a185b155961eab90f2d5b10", size = 2419859, upload-time = "2026-03-23T18:12:10.272Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/6c/d0/179abca8b984b3deefd996f362b612c39da73b60f685921e6cd58b6125b4/timm-1.0.15-py3-none-any.whl", hash = "sha256:5a3dc460c24e322ecc7fd1f3e3eb112423ddee320cb059cc1956fbc9731748ef", size = 2361373, upload-time = "2025-02-23T05:05:53.601Z" }, + { url = "https://files.pythonhosted.org/packages/6f/e9/bebf3d50e3fc847378988235f87c37ad3ac26d386041ab915d15e92025cd/timm-1.0.26-py3-none-any.whl", hash = "sha256:985c330de5ccc3a2aa0224eb7272e6a336084702390bb7e3801f3c91603d3683", size = 2568766, upload-time = "2026-03-23T18:12:08.062Z" }, ] [[package]] @@ -4530,31 +14597,29 @@ wheels = [ [[package]] name = "tomli" -version = "2.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, - { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, - { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, - { url = 
"https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, - { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, - { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, - { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, - { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, - { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, - { url = 
"https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, - { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, - { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, - { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, - { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, - { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, - { url = 
"https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, - { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, - { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, - { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, - { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, - { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +version = "2.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/22/de/48c59722572767841493b26183a0d1cc411d54fd759c5607c4590b6563a6/tomli-2.4.1.tar.gz", hash = "sha256:7c7e1a961a0b2f2472c1ac5b69affa0ae1132c39adcb67aba98568702b9cc23f", size = 17543, upload-time = "2026-03-25T20:22:03.828Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/11/db3d5885d8528263d8adc260bb2d28ebf1270b96e98f0e0268d32b8d9900/tomli-2.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f8f0fc26ec2cc2b965b7a3b87cd19c5c6b8c5e5f436b984e85f486d652285c30", size = 154704, upload-time = "2026-03-25T20:21:10.473Z" }, + { url = "https://files.pythonhosted.org/packages/6d/f7/675db52c7e46064a9aa928885a9b20f4124ecb9bc2e1ce74c9106648d202/tomli-2.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ab97e64ccda8756376892c53a72bd1f964e519c77236368527f758fbc36a53a", size = 149454, upload-time = "2026-03-25T20:21:12.036Z" }, + { url = "https://files.pythonhosted.org/packages/61/71/81c50943cf953efa35bce7646caab3cf457a7d8c030b27cfb40d7235f9ee/tomli-2.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96481a5786729fd470164b47cdb3e0e58062a496f455ee41b4403be77cb5a076", size = 237561, upload-time = "2026-03-25T20:21:13.098Z" }, + { url = "https://files.pythonhosted.org/packages/48/c1/f41d9cb618acccca7df82aaf682f9b49013c9397212cb9f53219e3abac37/tomli-2.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a881ab208c0baf688221f8cecc5401bd291d67e38a1ac884d6736cbcd8247e9", size = 243824, upload-time = "2026-03-25T20:21:14.569Z" }, + { url = "https://files.pythonhosted.org/packages/22/e4/5a816ecdd1f8ca51fb756ef684b90f2780afc52fc67f987e3c61d800a46d/tomli-2.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47149d5bd38761ac8be13a84864bf0b7b70bc051806bc3669ab1cbc56216b23c", size = 242227, upload-time = "2026-03-25T20:21:15.712Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/49/2b2a0ef529aa6eec245d25f0c703e020a73955ad7edf73e7f54ddc608aa5/tomli-2.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ec9bfaf3ad2df51ace80688143a6a4ebc09a248f6ff781a9945e51937008fcbc", size = 247859, upload-time = "2026-03-25T20:21:17.001Z" }, + { url = "https://files.pythonhosted.org/packages/83/bd/6c1a630eaca337e1e78c5903104f831bda934c426f9231429396ce3c3467/tomli-2.4.1-cp311-cp311-win32.whl", hash = "sha256:ff2983983d34813c1aeb0fa89091e76c3a22889ee83ab27c5eeb45100560c049", size = 97204, upload-time = "2026-03-25T20:21:18.079Z" }, + { url = "https://files.pythonhosted.org/packages/42/59/71461df1a885647e10b6bb7802d0b8e66480c61f3f43079e0dcd315b3954/tomli-2.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:5ee18d9ebdb417e384b58fe414e8d6af9f4e7a0ae761519fb50f721de398dd4e", size = 108084, upload-time = "2026-03-25T20:21:18.978Z" }, + { url = "https://files.pythonhosted.org/packages/b8/83/dceca96142499c069475b790e7913b1044c1a4337e700751f48ed723f883/tomli-2.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:c2541745709bad0264b7d4705ad453b76ccd191e64aa6f0fc66b69a293a45ece", size = 95285, upload-time = "2026-03-25T20:21:20.309Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ba/42f134a3fe2b370f555f44b1d72feebb94debcab01676bf918d0cb70e9aa/tomli-2.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c742f741d58a28940ce01d58f0ab2ea3ced8b12402f162f4d534dfe18ba1cd6a", size = 155924, upload-time = "2026-03-25T20:21:21.626Z" }, + { url = "https://files.pythonhosted.org/packages/dc/c7/62d7a17c26487ade21c5422b646110f2162f1fcc95980ef7f63e73c68f14/tomli-2.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7f86fd587c4ed9dd76f318225e7d9b29cfc5a9d43de44e5754db8d1128487085", size = 150018, upload-time = "2026-03-25T20:21:23.002Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/05/79d13d7c15f13bdef410bdd49a6485b1c37d28968314eabee452c22a7fda/tomli-2.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ff18e6a727ee0ab0388507b89d1bc6a22b138d1e2fa56d1ad494586d61d2eae9", size = 244948, upload-time = "2026-03-25T20:21:24.04Z" }, + { url = "https://files.pythonhosted.org/packages/10/90/d62ce007a1c80d0b2c93e02cab211224756240884751b94ca72df8a875ca/tomli-2.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:136443dbd7e1dee43c68ac2694fde36b2849865fa258d39bf822c10e8068eac5", size = 253341, upload-time = "2026-03-25T20:21:25.177Z" }, + { url = "https://files.pythonhosted.org/packages/1a/7e/caf6496d60152ad4ed09282c1885cca4eea150bfd007da84aea07bcc0a3e/tomli-2.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5e262d41726bc187e69af7825504c933b6794dc3fbd5945e41a79bb14c31f585", size = 248159, upload-time = "2026-03-25T20:21:26.364Z" }, + { url = "https://files.pythonhosted.org/packages/99/e7/c6f69c3120de34bbd882c6fba7975f3d7a746e9218e56ab46a1bc4b42552/tomli-2.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5cb41aa38891e073ee49d55fbc7839cfdb2bc0e600add13874d048c94aadddd1", size = 253290, upload-time = "2026-03-25T20:21:27.46Z" }, + { url = "https://files.pythonhosted.org/packages/d6/2f/4a3c322f22c5c66c4b836ec58211641a4067364f5dcdd7b974b4c5da300c/tomli-2.4.1-cp312-cp312-win32.whl", hash = "sha256:da25dc3563bff5965356133435b757a795a17b17d01dbc0f42fb32447ddfd917", size = 98141, upload-time = "2026-03-25T20:21:28.492Z" }, + { url = "https://files.pythonhosted.org/packages/24/22/4daacd05391b92c55759d55eaee21e1dfaea86ce5c571f10083360adf534/tomli-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:52c8ef851d9a240f11a88c003eacb03c31fc1c9c4ec64a99a0f922b93874fda9", size = 108847, upload-time = "2026-03-25T20:21:29.386Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/fd/70e768887666ddd9e9f5d85129e84910f2db2796f9096aa02b721a53098d/tomli-2.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:f758f1b9299d059cc3f6546ae2af89670cb1c4d48ea29c3cacc4fe7de3058257", size = 95088, upload-time = "2026-03-25T20:21:30.677Z" }, + { url = "https://files.pythonhosted.org/packages/7b/61/cceae43728b7de99d9b847560c262873a1f6c98202171fd5ed62640b494b/tomli-2.4.1-py3-none-any.whl", hash = "sha256:0d85819802132122da43cb86656f8d1f8c6587d54ae7dcaf30e90533028b49fe", size = 14583, upload-time = "2026-03-25T20:22:03.012Z" }, ] [[package]] @@ -4562,274 +14627,852 @@ name = "torch" version = "2.6.0+cu124" source = { registry = "https://download.pytorch.org/whl/cu124" } resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - 
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and sys_platform == 'darwin'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", -] -dependencies = [ - { name = "filelock", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "fsspec", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "jinja2", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "networkx", version = "3.4.2", source = { registry = 
"https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "networkx", version = "3.5", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu124' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cublas-cu12", version = "12.4.5.8", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra 
== 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cuda-cupti-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cuda-nvrtc-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or 
(extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cuda-runtime-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 
'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cudnn-cu12", version = "9.1.0.70", source = { registry = "https://pypi.org/simple" }, marker = 
"(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra 
== 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cufft-cu12", version = "11.2.1.3", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform 
!= 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-curand-cu12", version = "10.3.5.147", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') 
or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cusolver-cu12", version = "11.6.1.9", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform 
!= 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cusparse-cu12", version = "12.3.1.170", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra 
== 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cusparselt-cu12", version = "0.6.2", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-nccl-cu12", version = "2.21.5", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or 
(extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-nvjitlink-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 
'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-nvtx-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = 
"(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra 
== 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "setuptools", marker = "(python_full_version >= '3.12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.12' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "sympy", version = "1.13.1", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "triton", version = "3.2.0", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "typing-extensions", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126')" }, -] -wheels = [ - { url = "https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp310-cp310-linux_x86_64.whl", hash = "sha256:7f2ba7f7c0459320a521696f6b5bccc187f59890b23c9dfb6c49b0b87c6bfc97" }, - { url = "https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp310-cp310-win_amd64.whl", hash = "sha256:7cc45c5b39d74875cfafe908b7f55c544147cc16b01e795feb2fe766583efe78" }, - { url = "https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp311-cp311-linux_x86_64.whl", hash = "sha256:d4c3e9a8d31a7c0fcbb9da17c31a1917e1fac26c566a4cfbd8c9568ad7cade79" }, - { url = "https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp311-cp311-win_amd64.whl", hash = "sha256:6a1fb2714e9323f11edb6e8abf7aad5f79e45ad25c081cde87681a18d99c29eb" }, - { url = "https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp312-cp312-linux_x86_64.whl", hash = "sha256:a393b506844035c0dac2f30ea8478c343b8e95a429f06f3b3cadfc7f53adb597" }, - { url = "https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp312-cp312-win_amd64.whl", hash = "sha256:3313061c1fec4c7310cf47944e84513dcd27b6173b72a349bb7ca68d0ee6e9c0" }, + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", +] +dependencies = [ + { name = "filelock", marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "fsspec", version = "2026.3.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "jinja2", marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or 
(extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cublas-cu12", version = "12.4.5.8", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cuda-cupti-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or 
(sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cuda-nvrtc-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or 
(sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cuda-runtime-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or 
(platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or 
(extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cudnn-cu12", version = "9.1.0.70", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra 
== 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cufft-cu12", version = "11.2.1.3", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra 
== 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-curand-cu12", version = "10.3.5.147", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cusolver-cu12", version = "11.6.1.9", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or 
(platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cusparse-cu12", version = "12.3.1.170", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and 
extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cusparselt-cu12", version = "0.6.2", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-nccl-cu12", version = "2.21.5", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra 
== 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') 
or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-nvjitlink-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-nvtx-cu12", version = "12.4.127", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') 
or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "setuptools", marker = "(python_full_version >= '3.12' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "sympy", version = "1.13.1", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "triton", version = "3.2.0", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "typing-extensions", marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +wheels = [ + { url = "https://download-r2.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp310-cp310-linux_x86_64.whl", hash = "sha256:7f2ba7f7c0459320a521696f6b5bccc187f59890b23c9dfb6c49b0b87c6bfc97" }, + { url = "https://download-r2.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp310-cp310-win_amd64.whl", hash = "sha256:7cc45c5b39d74875cfafe908b7f55c544147cc16b01e795feb2fe766583efe78" }, + { url = "https://download-r2.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp311-cp311-linux_x86_64.whl", hash = "sha256:d4c3e9a8d31a7c0fcbb9da17c31a1917e1fac26c566a4cfbd8c9568ad7cade79" }, + { url = "https://download-r2.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp311-cp311-win_amd64.whl", hash = 
"sha256:6a1fb2714e9323f11edb6e8abf7aad5f79e45ad25c081cde87681a18d99c29eb" }, + { url = "https://download-r2.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp312-cp312-linux_x86_64.whl", hash = "sha256:a393b506844035c0dac2f30ea8478c343b8e95a429f06f3b3cadfc7f53adb597" }, + { url = "https://download-r2.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp312-cp312-win_amd64.whl", hash = "sha256:3313061c1fec4c7310cf47944e84513dcd27b6173b72a349bb7ca68d0ee6e9c0" }, ] [[package]] name = "torch" -version = "2.7.1" -source = { registry = "https://download.pytorch.org/whl/cpu" } +version = "2.7.1+cu118" +source = { registry = "https://download.pytorch.org/whl/cu118" } resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", - "python_full_version < '3.11' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 
's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", ] dependencies = [ - { name = "filelock", marker = "(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "fsspec", marker = "(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "jinja2", marker = "(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or 
(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "networkx", version = "3.5", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 
'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "setuptools", marker = "(python_full_version >= '3.12' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.12' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform 
!= 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
== 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 
'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "typing-extensions", marker = "(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or 
(extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, -] -wheels = [ - { url = "https://download.pytorch.org/whl/cpu/torch-2.7.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:f8c3bee261b0c8e090f6347490dc6ee2aebfd661eb0f3f6aeae06d992d8ed56f" }, - { url = "https://download.pytorch.org/whl/cpu/torch-2.7.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:68a352c7f435abb5cb47e2c032dcd1012772ae2bacb6fc8b83b0c1b11874ab3a" }, - { url = "https://download.pytorch.org/whl/cpu/torch-2.7.1-cp312-none-macosx_11_0_arm64.whl", hash = 
"sha256:7b4f8b2b83bd08f7d399025a9a7b323bdbb53d20566f1e0d584689bb92d82f9a" }, + { name = "filelock", marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "fsspec", version = "2026.3.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "jinja2", marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cublas-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra 
== 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cuda-cupti-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cuda-nvrtc-cu11", marker = 
"(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cuda-runtime-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra 
== 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cudnn-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cufft-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-curand-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra 
== 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 
'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cusolver-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform 
!= 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cusparse-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine 
!= 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-nccl-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-nvtx-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and 
extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') 
or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "setuptools", marker = "(python_full_version >= '3.12' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or 
(extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "triton", version = "3.3.1", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "typing-extensions", marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +wheels = [ + { url = "https://download-r2.pytorch.org/whl/cu118/torch-2.7.1%2Bcu118-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:89433c62b02ec802d4c0887c867d935887ae8f00d7cc549ecf1c2640d096bd4c" }, + { url = "https://download-r2.pytorch.org/whl/cu118/torch-2.7.1%2Bcu118-cp310-cp310-win_amd64.whl", hash = "sha256:af4833e36a8e964681a4dad7775f559cf043bd42c9d0c0b5e0619f9d0e44cb56" }, + { url = "https://download-r2.pytorch.org/whl/cu118/torch-2.7.1%2Bcu118-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a596d91c747d1fa601724e85b9c8797c8d7c62140aa1acf245773e911254bc45" }, + { url = "https://download-r2.pytorch.org/whl/cu118/torch-2.7.1%2Bcu118-cp311-cp311-win_amd64.whl", hash = "sha256:584e5ee99d29286b93be2fba3b3f1f5b9d7a4b9055a288eb31b33100a1f09ed9" }, + { url = "https://download-r2.pytorch.org/whl/cu118/torch-2.7.1%2Bcu118-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:91454dcfdb81f181fdf216d6d6d9912fbd8795578b90384b3b8b8132737072bb" }, + { url = "https://download-r2.pytorch.org/whl/cu118/torch-2.7.1%2Bcu118-cp312-cp312-win_amd64.whl", hash = "sha256:80855ec840b7b06372ff43535d01393a8ec101842618d1f9ed629572b52aed71" }, ] [[package]] name = "torch" -version = "2.7.1+cpu" +version = "2.11.0" source = { registry = "https://download.pytorch.org/whl/cpu" } resolution-markers = [ - "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version 
== '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", ] dependencies = [ - { name = "filelock", marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "fsspec", marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "jinja2", marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or 
(python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "networkx", version = "3.5", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "setuptools", marker = "(python_full_version >= '3.12' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.12' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or 
(python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "typing-extensions", marker = "(sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = 
"filelock", marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "fsspec", version = "2026.3.0", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "jinja2", marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and 
extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' 
and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "setuptools", marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra 
== 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "typing-extensions", marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ - { url = "https://download.pytorch.org/whl/cpu/torch-2.7.1%2Bcpu-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c0df17cee97653d09a4e84488a33d21217f9b24208583c55cf28f0045aab0766" }, - { url = "https://download.pytorch.org/whl/cpu/torch-2.7.1%2Bcpu-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:1f04a373a3f643821f721da9898ef77dce73b5b6bfc64486f0976f7fb5f90e83" }, - { url = "https://download.pytorch.org/whl/cpu/torch-2.7.1%2Bcpu-cp310-cp310-win_amd64.whl", hash = "sha256:b4cc706973655151f198d027ed34c92ab31a3db55676b44251194e1280631426" }, - { url = "https://download.pytorch.org/whl/cpu/torch-2.7.1%2Bcpu-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5fe6045b8f426bf2d0426e4fe009f1667a954ec2aeb82f1bd0bf60c6d7a85445" }, - { url = "https://download.pytorch.org/whl/cpu/torch-2.7.1%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a1684793e352f03fa14f78857e55d65de4ada8405ded1da2bf4f452179c4b779" }, - { url = "https://download.pytorch.org/whl/cpu/torch-2.7.1%2Bcpu-cp311-cp311-win_amd64.whl", hash = "sha256:7b977eccbc85ae2bd19d6998de7b1f1f4bd3c04eaffd3015deb7934389783399" }, - { url = "https://download.pytorch.org/whl/cpu/torch-2.7.1%2Bcpu-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3bf2db5adf77b433844f080887ade049c4705ddf9fe1a32023ff84ff735aa5ad" }, - { url = "https://download.pytorch.org/whl/cpu/torch-2.7.1%2Bcpu-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:8f8b3cfc53010a4b4a3c7ecb88c212e9decc4f5eeb6af75c3c803937d2d60947" }, - { url = "https://download.pytorch.org/whl/cpu/torch-2.7.1%2Bcpu-cp312-cp312-win_amd64.whl", hash = "sha256:0bc887068772233f532b51a3e8c8cfc682ae62bef74bf4e0c53526c8b9e4138f" }, - { url = 
"https://download.pytorch.org/whl/cpu/torch-2.7.1%2Bcpu-cp312-cp312-win_arm64.whl", hash = "sha256:a2618775f32eb4126c5b2050686da52001a08cffa331637d9cf51c8250931e00" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torch-2.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91209c7d8a2460b76e8ff5b28b7623da4ab1d27474b79e1de83e954871985afe" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torch-2.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d75eadcd97fe0dc7cd0eedc4d72152484c19cb2cfe46ce55766c8e129116425f" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torch-2.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:43b35116802c85fb88d99f4a396b8bd4472bfca1dd82e69499e5a4f9b8b4e252" }, ] [[package]] name = "torch" -version = "2.7.1+cu118" -source = { registry = "https://download.pytorch.org/whl/cu118" } +version = "2.11.0" +source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" } resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and 
sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and sys_platform == 'darwin'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", -] -dependencies = [ - { name = "filelock", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or 
(extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "fsspec", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "jinja2", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "networkx", version = "3.5", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cublas-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 
'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') 
or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cuda-cupti-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or 
(sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cuda-nvrtc-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra 
== 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
== 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 
'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cuda-runtime-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cudnn-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cufft-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or 
(sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-curand-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform 
!= 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cusolver-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 
'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cusparse-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or 
(sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-nccl-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 
'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-nvtx-cu11", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' 
and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or 
(sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "setuptools", marker = "(python_full_version >= '3.12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or 
(python_full_version >= '3.12' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "triton", version = "3.3.1", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' 
and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "typing-extensions", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, -] -wheels = [ - { url = "https://download.pytorch.org/whl/cu118/torch-2.7.1%2Bcu118-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:89433c62b02ec802d4c0887c867d935887ae8f00d7cc549ecf1c2640d096bd4c" }, - { url = "https://download.pytorch.org/whl/cu118/torch-2.7.1%2Bcu118-cp310-cp310-win_amd64.whl", hash = "sha256:af4833e36a8e964681a4dad7775f559cf043bd42c9d0c0b5e0619f9d0e44cb56" }, - { url = "https://download.pytorch.org/whl/cu118/torch-2.7.1%2Bcu118-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a596d91c747d1fa601724e85b9c8797c8d7c62140aa1acf245773e911254bc45" }, - { url = "https://download.pytorch.org/whl/cu118/torch-2.7.1%2Bcu118-cp311-cp311-win_amd64.whl", hash = "sha256:584e5ee99d29286b93be2fba3b3f1f5b9d7a4b9055a288eb31b33100a1f09ed9" }, - { url = "https://download.pytorch.org/whl/cu118/torch-2.7.1%2Bcu118-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:91454dcfdb81f181fdf216d6d6d9912fbd8795578b90384b3b8b8132737072bb" }, - { url = 
"https://download.pytorch.org/whl/cu118/torch-2.7.1%2Bcu118-cp312-cp312-win_amd64.whl", hash = "sha256:80855ec840b7b06372ff43535d01393a8ec101842618d1f9ed629572b52aed71" }, + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", +] +dependencies = [ + { name = "filelock", marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "fsspec", version = "2026.3.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "jinja2", marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "setuptools", marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "typing-extensions", marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, +] +wheels = [ + { url = "https://pypi.jetson-ai-lab.io/jp6/cu126/+f/46b/b8b13f844b211/torch-2.11.0-cp310-cp310-linux_aarch64.whl", hash = "sha256:46bb8b13f844b2111f6f41abc703d01075a7185e0c587cb002e957ede2b8c8a3" }, ] [[package]] name = "torch" -version = "2.7.1+cu128" -source = { registry = "https://download.pytorch.org/whl/cu128" } +version = "2.11.0" +source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 
'darwin'", - "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and sys_platform == 'darwin'", - "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", -] -dependencies = [ - { name = "filelock", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "fsspec", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "jinja2", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
== 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "networkx", version = "3.5", source = { registry = "https://pypi.org/simple" }, marker = 
"(python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cublas-cu12", version = "12.8.3.14", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' 
and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cuda-cupti-cu12", version = "12.8.57", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra 
== 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cuda-nvrtc-cu12", version = "12.8.61", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cuda-runtime-cu12", version = "12.8.57", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cudnn-cu12", version = "9.7.1.26", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cufft-cu12", version = "11.3.3.41", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cufile-cu12", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-curand-cu12", version = "10.3.9.55", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') 
or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cusolver-cu12", version = "11.7.2.55", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cusparse-cu12", version = "12.5.7.53", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-cusparselt-cu12", version = "0.6.3", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-nccl-cu12", version = "2.26.2", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-nvjitlink-cu12", version = "12.8.61", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "nvidia-nvtx-cu12", version = "12.8.55", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'x86_64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'x86_64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "setuptools", marker = "(python_full_version >= '3.12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.12' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "triton", version = "3.3.1", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "typing-extensions", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, -] -wheels = [ - { url = "https://download.pytorch.org/whl/cu128/torch-2.7.1%2Bcu128-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:aca3472608e3c92df5166537595687b53a6c997082478b372427b043dbed98d0" }, - { url = "https://download.pytorch.org/whl/cu128/torch-2.7.1%2Bcu128-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:d6c3cba198dc93f93422a8545f48a6697890366e4b9701f54351fc27e2304bd3" }, - { url = "https://download.pytorch.org/whl/cu128/torch-2.7.1%2Bcu128-cp310-cp310-win_amd64.whl", hash = "sha256:5174f02de8ca14df87c8e333c4c39cf3ce93a323c9d470d690301d110a053b3c" }, 
- { url = "https://download.pytorch.org/whl/cu128/torch-2.7.1%2Bcu128-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:3a0954c54fd7cb9f45beab1272dece2a05b0e77023c1da33ba32a7919661260f" }, - { url = "https://download.pytorch.org/whl/cu128/torch-2.7.1%2Bcu128-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c301dc280458afd95450af794924c98fe07522dd148ff384739b810e3e3179f2" }, - { url = "https://download.pytorch.org/whl/cu128/torch-2.7.1%2Bcu128-cp311-cp311-win_amd64.whl", hash = "sha256:138c66dcd0ed2f07aafba3ed8b7958e2bed893694990e0b4b55b6b2b4a336aa6" }, - { url = "https://download.pytorch.org/whl/cu128/torch-2.7.1%2Bcu128-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:268e54db9f0bc2b7b9eb089852d3e592c2dea2facc3db494100c3d3b796549fa" }, - { url = "https://download.pytorch.org/whl/cu128/torch-2.7.1%2Bcu128-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:0b64f7d0a6f2a739ed052ba959f7b67c677028c9566ce51997f9f90fe573ddaa" }, - { url = "https://download.pytorch.org/whl/cu128/torch-2.7.1%2Bcu128-cp312-cp312-win_amd64.whl", hash = "sha256:2bb8c05d48ba815b316879a18195d53a6472a03e297d971e916753f8e1053d30" }, + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' 
and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' 
and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and 
platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra 
!= 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' 
and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and 
sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 
'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' 
and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 
'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine 
!= 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == 
'3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine 
== 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine 
!= 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 
'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine 
!= 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and 
sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and 
platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", 
+ "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 
's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version 
< '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", +] +dependencies = [ + { name = "filelock", marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "fsspec", version = "2026.2.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or 
(extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128')" }, + { name = "fsspec", version = "2026.3.0", 
source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "jinja2", marker = "extra == 
'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "setuptools", marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "typing-extensions", marker = "extra == 
'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-jp6-cu126')" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/f2/c1690994afe461aae2d0cac62251e6802a703dec0a6c549c02ecd0de92a9/torch-2.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2c0d7fcfbc0c4e8bb5ebc3907cbc0c6a0da1b8f82b1fc6e14e914fa0b9baf74e", size = 80526521, upload-time = "2026-03-23T18:12:06.86Z" }, + { url = "https://files.pythonhosted.org/packages/a4/f0/98ae802fa8c09d3149b0c8690741f3f5753c90e779bd28c9613257295945/torch-2.11.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:4cf8687f4aec3900f748d553483ef40e0ac38411c3c48d0a86a438f6d7a99b18", size = 419723025, upload-time = "2026-03-23T18:11:43.774Z" }, + { url = "https://files.pythonhosted.org/packages/f9/1e/18a9b10b4bd34f12d4e561c52b0ae7158707b8193c6cfc0aad2b48167090/torch-2.11.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:1b32ceda909818a03b112006709b02be1877240c31750a8d9c6b7bf5f2d8a6e5", size = 530589207, upload-time = "2026-03-23T18:11:23.756Z" }, + { url = "https://files.pythonhosted.org/packages/35/40/2d532e8c0e23705be9d1debce5bc37b68d59a39bda7584c26fe9668076fe/torch-2.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:b3c712ae6fb8e7a949051a953fc412fe0a6940337336c3b6f905e905dac5157f", size = 114518313, upload-time = "2026-03-23T18:11:58.281Z" }, + { url = "https://files.pythonhosted.org/packages/ae/0d/98b410492609e34a155fa8b121b55c7dca229f39636851c3a9ec20edea21/torch-2.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7b6a60d48062809f58595509c524b88e6ddec3ebe25833d6462eeab81e5f2ce4", size = 80529712, upload-time = "2026-03-23T18:12:02.608Z" }, + { url = "https://files.pythonhosted.org/packages/84/03/acea680005f098f79fd70c1d9d5ccc0cb4296ec2af539a0450108232fc0c/torch-2.11.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:d91aac77f24082809d2c5a93f52a5f085032740a1ebc9252a7b052ef5a4fddc6", size = 419718178, upload-time = "2026-03-23T18:10:46.675Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/8b/d7be22fbec9ffee6cff31a39f8750d4b3a65d349a286cf4aec74c2375662/torch-2.11.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:7aa2f9bbc6d4595ba72138026b2074be1233186150e9292865e04b7a63b8c67a", size = 530604548, upload-time = "2026-03-23T18:10:03.569Z" }, + { url = "https://files.pythonhosted.org/packages/d1/bd/9912d30b68845256aabbb4a40aeefeef3c3b20db5211ccda653544ada4b6/torch-2.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:73e24aaf8f36ab90d95cd1761208b2eb70841c2a9ca1a3f9061b39fc5331b708", size = 114519675, upload-time = "2026-03-23T18:11:52.995Z" }, + { url = "https://files.pythonhosted.org/packages/6f/8b/69e3008d78e5cee2b30183340cc425081b78afc5eff3d080daab0adda9aa/torch-2.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4b5866312ee6e52ea625cd211dcb97d6a2cdc1131a5f15cc0d87eec948f6dd34", size = 80606338, upload-time = "2026-03-23T18:11:34.781Z" }, + { url = "https://files.pythonhosted.org/packages/13/16/42e5915ebe4868caa6bac83a8ed59db57f12e9a61b7d749d584776ed53d5/torch-2.11.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f99924682ef0aa6a4ab3b1b76f40dc6e273fca09f367d15a524266db100a723f", size = 419731115, upload-time = "2026-03-23T18:11:06.944Z" }, + { url = "https://files.pythonhosted.org/packages/1a/c9/82638ef24d7877510f83baf821f5619a61b45568ce21c0a87a91576510aa/torch-2.11.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:0f68f4ac6d95d12e896c3b7a912b5871619542ec54d3649cf48cc1edd4dd2756", size = 530712279, upload-time = "2026-03-23T18:10:31.481Z" }, + { url = "https://files.pythonhosted.org/packages/1c/ff/6756f1c7ee302f6d202120e0f4f05b432b839908f9071157302cedfc5232/torch-2.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:fbf39280699d1b869f55eac536deceaa1b60bd6788ba74f399cc67e60a5fab10", size = 114556047, upload-time = "2026-03-23T18:10:55.931Z" }, ] [[package]] name = "torch" -version = "2.8.0" -source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" } +version = "2.11.0+cpu" 
+source = { registry = "https://download.pytorch.org/whl/cpu" } resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin'", "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and sys_platform == 'darwin'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin'", "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or 
(python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", ] dependencies = [ - { name = "filelock", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "fsspec", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "jinja2", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "networkx", version = "3.5", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= 
'3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "setuptools", marker = "(python_full_version >= '3.12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "typing-extensions", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, -] -wheels = [ - { url = "https://pypi.jetson-ai-lab.io/jp6/cu126/+f/62a/1beee9f2f1470/torch-2.8.0-cp310-cp310-linux_aarch64.whl", hash = "sha256:62a1beee9f2f147076a974d2942c90060c12771c94740830327cae705b2595fc" }, + { name = "filelock", marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' 
and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "fsspec", version = "2026.3.0", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "jinja2", marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and sys_platform != 'darwin' and extra == 
'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "setuptools", marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra 
== 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "typing-extensions", marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +wheels = [ + { url = "https://download-r2.pytorch.org/whl/cpu/torch-2.11.0%2Bcpu-cp310-cp310-linux_s390x.whl" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torch-2.11.0%2Bcpu-cp310-cp310-manylinux_2_28_aarch64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torch-2.11.0%2Bcpu-cp310-cp310-manylinux_2_28_x86_64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torch-2.11.0%2Bcpu-cp310-cp310-win_amd64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torch-2.11.0%2Bcpu-cp311-cp311-linux_s390x.whl" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torch-2.11.0%2Bcpu-cp311-cp311-manylinux_2_28_aarch64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torch-2.11.0%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torch-2.11.0%2Bcpu-cp311-cp311-win_amd64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torch-2.11.0%2Bcpu-cp312-cp312-linux_s390x.whl" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torch-2.11.0%2Bcpu-cp312-cp312-manylinux_2_28_aarch64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torch-2.11.0%2Bcpu-cp312-cp312-manylinux_2_28_x86_64.whl" }, + { url = 
"https://download-r2.pytorch.org/whl/cpu/torch-2.11.0%2Bcpu-cp312-cp312-win_amd64.whl" }, ] [[package]] name = "torch" -version = "2.8.0" -source = { registry = "https://pypi.org/simple" } +version = "2.11.0+cu128" +source = { registry = "https://download.pytorch.org/whl/cu128" } resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", - "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and sys_platform == 'darwin'", - "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra 
!= 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' 
and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra 
== 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 
'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 
's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= 
'3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra 
== 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", ] dependencies = [ - { name = "filelock" }, - { name = "fsspec" }, - { name = "jinja2" }, - { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "networkx", version = "3.5", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "setuptools", marker = "python_full_version >= '3.12' or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126')" }, - { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" } }, - { name = "typing-extensions" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/63/28/110f7274254f1b8476c561dada127173f994afa2b1ffc044efb773c15650/torch-2.8.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:0be92c08b44009d4131d1ff7a8060d10bafdb7ddcb7359ef8d8c5169007ea905", size = 102052793, upload-time = "2025-08-06T14:53:15.852Z" }, - { url = "https://files.pythonhosted.org/packages/70/1c/58da560016f81c339ae14ab16c98153d51c941544ae568da3cb5b1ceb572/torch-2.8.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:89aa9ee820bb39d4d72b794345cccef106b574508dd17dbec457949678c76011", size = 888025420, upload-time = "2025-08-06T14:54:18.014Z" }, - { url = "https://files.pythonhosted.org/packages/70/87/f69752d0dd4ba8218c390f0438130c166fa264a33b7025adb5014b92192c/torch-2.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e8e5bf982e87e2b59d932769938b698858c64cc53753894be25629bdf5cf2f46", size = 241363614, upload-time = "2025-08-06T14:53:31.496Z" }, - { url = "https://files.pythonhosted.org/packages/ef/d6/e6d4c57e61c2b2175d3aafbfb779926a2cfd7c32eeda7c543925dceec923/torch-2.8.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:a3f16a58a9a800f589b26d47ee15aca3acf065546137fc2af039876135f4c760", size = 73611154, upload-time = 
"2025-08-06T14:53:10.919Z" }, - { url = "https://files.pythonhosted.org/packages/8f/c4/3e7a3887eba14e815e614db70b3b529112d1513d9dae6f4d43e373360b7f/torch-2.8.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:220a06fd7af8b653c35d359dfe1aaf32f65aa85befa342629f716acb134b9710", size = 102073391, upload-time = "2025-08-06T14:53:20.937Z" }, - { url = "https://files.pythonhosted.org/packages/5a/63/4fdc45a0304536e75a5e1b1bbfb1b56dd0e2743c48ee83ca729f7ce44162/torch-2.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c12fa219f51a933d5f80eeb3a7a5d0cbe9168c0a14bbb4055f1979431660879b", size = 888063640, upload-time = "2025-08-06T14:55:05.325Z" }, - { url = "https://files.pythonhosted.org/packages/84/57/2f64161769610cf6b1c5ed782bd8a780e18a3c9d48931319f2887fa9d0b1/torch-2.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:8c7ef765e27551b2fbfc0f41bcf270e1292d9bf79f8e0724848b1682be6e80aa", size = 241366752, upload-time = "2025-08-06T14:53:38.692Z" }, - { url = "https://files.pythonhosted.org/packages/a4/5e/05a5c46085d9b97e928f3f037081d3d2b87fb4b4195030fc099aaec5effc/torch-2.8.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:5ae0524688fb6707c57a530c2325e13bb0090b745ba7b4a2cd6a3ce262572916", size = 73621174, upload-time = "2025-08-06T14:53:25.44Z" }, - { url = "https://files.pythonhosted.org/packages/49/0c/2fd4df0d83a495bb5e54dca4474c4ec5f9c62db185421563deeb5dabf609/torch-2.8.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:e2fab4153768d433f8ed9279c8133a114a034a61e77a3a104dcdf54388838705", size = 101906089, upload-time = "2025-08-06T14:53:52.631Z" }, - { url = "https://files.pythonhosted.org/packages/99/a8/6acf48d48838fb8fe480597d98a0668c2beb02ee4755cc136de92a0a956f/torch-2.8.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b2aca0939fb7e4d842561febbd4ffda67a8e958ff725c1c27e244e85e982173c", size = 887913624, upload-time = "2025-08-06T14:56:44.33Z" }, - { url = 
"https://files.pythonhosted.org/packages/af/8a/5c87f08e3abd825c7dfecef5a0f1d9aa5df5dd0e3fd1fa2f490a8e512402/torch-2.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:2f4ac52f0130275d7517b03a33d2493bab3693c83dcfadf4f81688ea82147d2e", size = 241326087, upload-time = "2025-08-06T14:53:46.503Z" }, - { url = "https://files.pythonhosted.org/packages/be/66/5c9a321b325aaecb92d4d1855421e3a055abd77903b7dab6575ca07796db/torch-2.8.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:619c2869db3ada2c0105487ba21b5008defcc472d23f8b80ed91ac4a380283b0", size = 73630478, upload-time = "2025-08-06T14:53:57.144Z" }, + { name = "cuda-bindings", marker = "(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "cuda-toolkit", extra = ["cublas", "cudart", "cufft", "cufile", "cupti", "curand", "cusolver", "cusparse", "nvjitlink", "nvrtc", "nvtx"], marker = "(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "filelock", marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "fsspec", version = "2026.2.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "fsspec", version = "2026.3.0", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and 
extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128')" }, + { name = "jinja2", marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or 
(extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cudnn-cu12", version = "9.19.0.56", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra 
== 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra 
== 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-cusparselt-cu12", version = "0.7.1", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and 
extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-nccl-cu12", version = "2.28.9", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' 
and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "nvidia-nvshmem-cu12", marker = "(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "setuptools", marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "sympy", version = "1.14.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "triton", version = "3.6.0", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = 
"typing-extensions", marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, +] +wheels = [ + { url = "https://download-r2.pytorch.org/whl/cu128/torch-2.11.0%2Bcu128-cp310-cp310-manylinux_2_28_aarch64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cu128/torch-2.11.0%2Bcu128-cp310-cp310-manylinux_2_28_x86_64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cu128/torch-2.11.0%2Bcu128-cp310-cp310-win_amd64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cu128/torch-2.11.0%2Bcu128-cp311-cp311-manylinux_2_28_aarch64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cu128/torch-2.11.0%2Bcu128-cp311-cp311-manylinux_2_28_x86_64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cu128/torch-2.11.0%2Bcu128-cp311-cp311-win_amd64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cu128/torch-2.11.0%2Bcu128-cp312-cp312-manylinux_2_28_aarch64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cu128/torch-2.11.0%2Bcu128-cp312-cp312-manylinux_2_28_x86_64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cu128/torch-2.11.0%2Bcu128-cp312-cp312-win_amd64.whl" }, ] [[package]] @@ -4837,20 +15480,28 @@ name = "torchvision" version = "0.21.0+cu124" source = { registry = "https://download.pytorch.org/whl/cu124" } 
resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and sys_platform == 'darwin'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + 
"python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", ] dependencies = [ - { name = "numpy", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "pillow", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.6.0+cu124", source = { registry = 
"https://download.pytorch.org/whl/cu124" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or 
(python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pillow", marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "extra == 'extra-16-inference-models-torch-cu124' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or 
(extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ { url = "https://download-r2.pytorch.org/whl/cu124/torchvision-0.21.0%2Bcu124-cp310-cp310-linux_x86_64.whl", hash = "sha256:3d3e74018eaa7837c73e3764dad3b7792b7544401c25a42977e9744303731bd3" }, @@ -4863,217 +15514,786 @@ wheels = [ [[package]] name = "torchvision" -version = "0.22.1" -source = { registry = "https://download.pytorch.org/whl/cpu" } -resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", - "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "python_full_version < '3.11' and sys_platform == 'darwin'", - "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", -] -dependencies = [ - { name = "numpy", marker = 
"(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or 
(sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "pillow", marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 
'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' 
and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or 
(sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 
'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or 
(sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or 
(extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, -] -wheels = [ - { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.22.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3b47d8369ee568c067795c0da0b4078f39a9dfea6f3bc1f3ac87530dfda1dd56" }, - { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.22.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:990de4d657a41ed71680cd8be2e98ebcab55371f30993dc9bd2e676441f7180e" }, - { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.22.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4addf626e2b57fc22fd6d329cf1346d474497672e6af8383b7b5b636fba94a53" }, - { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.22.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:8b4a53a6067d63adba0c52f2b8dd2290db649d642021674ee43c0c922f0c6a69" }, - { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.22.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:153f1790e505bd6da123e21eee6e83e2e155df05c0fe7d56347303067d8543c5" }, - { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.22.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:964414eef19459d55a10e886e2fca50677550e243586d1678f65e3f6f6bac47a" }, -] - -[[package]] -name = "torchvision" -version = "0.22.1" -source = { registry = "https://download.pytorch.org/whl/cu128" } +version = "0.22.1+cu118" +source = { registry = "https://download.pytorch.org/whl/cu118" } resolution-markers = [ + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version >= '3.12' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", ] dependencies = [ - { name = "numpy", marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "pillow", marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
== 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra 
== 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra 
== 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' 
and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pillow", marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "extra == 'extra-16-inference-models-torch-cu118' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra 
== 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') 
or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ - { url = "https://download-r2.pytorch.org/whl/cu128/torchvision-0.22.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:3a41b87628c0d6095839c43a1dd706670e7e5a56edc5860e700e1ba22c3ef8af" }, - { url = "https://download-r2.pytorch.org/whl/cu128/torchvision-0.22.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:299fbd935f02b424e9166b7689de57067e24a228edc00abd5faf84f86c0643a0" }, - { url = "https://download-r2.pytorch.org/whl/cu128/torchvision-0.22.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:2ad7fe412b821333fc05b4046263d77c14ba86f3965366adbada8dc397ea45b4" }, + { url = "https://download-r2.pytorch.org/whl/cu118/torchvision-0.22.1%2Bcu118-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:64954400493caf614b5aebded9b5a63eda8753cea9f8a1e32752eb2adc6ed383" }, + { url = "https://download-r2.pytorch.org/whl/cu118/torchvision-0.22.1%2Bcu118-cp310-cp310-win_amd64.whl", hash = "sha256:608ea6cb93eb66cdf273cab41122f83191c198c3c4ff583609f0e6e184f52865" }, + { url = 
"https://download-r2.pytorch.org/whl/cu118/torchvision-0.22.1%2Bcu118-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:6dd3d825fb4a75eae887665d1da812a360d69273118bfa17616c836bfb466627" }, + { url = "https://download-r2.pytorch.org/whl/cu118/torchvision-0.22.1%2Bcu118-cp311-cp311-win_amd64.whl", hash = "sha256:98a626661670cd96faeef05b33a140a09f9f134b0ab38d127b6ebbcf73d42bf5" }, + { url = "https://download-r2.pytorch.org/whl/cu118/torchvision-0.22.1%2Bcu118-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e4a8696261b8c64297a2683656f4ee64418d7ee391627702fe5c06b72ce81123" }, + { url = "https://download-r2.pytorch.org/whl/cu118/torchvision-0.22.1%2Bcu118-cp312-cp312-win_amd64.whl", hash = "sha256:3e927a3b0b08c7582cfa09e5f16b35435de390a612cfe76eed1418ab7b68d6b6" }, ] [[package]] name = "torchvision" -version = "0.22.1+cpu" +version = "0.26.0" source = { registry = "https://download.pytorch.org/whl/cpu" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 
'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", ] dependencies = [ - { name = "numpy", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or 
(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "pillow", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and 
extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') 
or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = 
"2.7.1+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra 
== 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' 
and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 
'darwin' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and 
extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pillow", marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' 
and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ - { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.22.1%2Bcpu-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:e31f1273a8dd9760906288036ac3c8f5fef25eed393da0491db150d7be78910d" }, - { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.22.1%2Bcpu-cp310-cp310-win_amd64.whl", hash = "sha256:445e442b94c365f7fd96596347c8a5a7fcfcbfca17a23baa8c9dcc8cb00fceee" }, - { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.22.1%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:4e0cbc165a472605d0c13da68ae22e84b17a6b815d5e600834777823e1bcb658" }, - { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.22.1%2Bcpu-cp311-cp311-win_amd64.whl", hash = "sha256:9482adee074f60a45fd69892f7488281aadfda7836948c94b0a9b0caf55d1d67" }, - { url = 
"https://download-r2.pytorch.org/whl/cpu/torchvision-0.22.1%2Bcpu-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b5fa7044bd82c6358e8229351c98070cf3a7bf4a6e89ea46352ae6c65745ef94" }, - { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.22.1%2Bcpu-cp312-cp312-win_amd64.whl", hash = "sha256:433cb4dbced7291f17064cea08ac1e5aebd02ec190e1c207d117ad62a8961f2b" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.26.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a06d4772a8e13e772906ed736cc53ec6639e5e60554f8e5fa6ca165aabebc464" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.26.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:55bd6ad4ae77be01ba67a410b05b51f53b0d0ee45f146eb6a0dfb9007e70ab3c" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c409e1c3fdebec7a3834465086dbda8bf7680eff79abf7fd2f10c6b59520a7a4" }, ] [[package]] name = "torchvision" -version = "0.22.1+cu118" -source = { registry = "https://download.pytorch.org/whl/cu118" } +version = "0.26.0" +source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" } resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and sys_platform == 'darwin'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin'", "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", ] dependencies = [ - { name = "numpy", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "pillow", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' 
and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' 
and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu118", source = { registry = "https://download.pytorch.org/whl/cu118" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
== 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "pillow", marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or 
(extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "extra == 'extra-16-inference-models-torch-jp6-cu126' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or 
(extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126')" }, ] wheels = [ - { url = "https://download-r2.pytorch.org/whl/cu118/torchvision-0.22.1%2Bcu118-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:64954400493caf614b5aebded9b5a63eda8753cea9f8a1e32752eb2adc6ed383" }, - { url = "https://download-r2.pytorch.org/whl/cu118/torchvision-0.22.1%2Bcu118-cp310-cp310-win_amd64.whl", hash = "sha256:608ea6cb93eb66cdf273cab41122f83191c198c3c4ff583609f0e6e184f52865" }, - { url = "https://download-r2.pytorch.org/whl/cu118/torchvision-0.22.1%2Bcu118-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:6dd3d825fb4a75eae887665d1da812a360d69273118bfa17616c836bfb466627" }, - { url = "https://download-r2.pytorch.org/whl/cu118/torchvision-0.22.1%2Bcu118-cp311-cp311-win_amd64.whl", hash = "sha256:98a626661670cd96faeef05b33a140a09f9f134b0ab38d127b6ebbcf73d42bf5" }, - { url = "https://download-r2.pytorch.org/whl/cu118/torchvision-0.22.1%2Bcu118-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e4a8696261b8c64297a2683656f4ee64418d7ee391627702fe5c06b72ce81123" }, - { url = 
"https://download-r2.pytorch.org/whl/cu118/torchvision-0.22.1%2Bcu118-cp312-cp312-win_amd64.whl", hash = "sha256:3e927a3b0b08c7582cfa09e5f16b35435de390a612cfe76eed1418ab7b68d6b6" }, + { url = "https://pypi.jetson-ai-lab.io/jp6/cu126/+f/d11/6f08d3d62417d/torchvision-0.26.0-cp310-cp310-linux_aarch64.whl", hash = "sha256:d116f08d3d62417dc36cbf41923e491b8b49b77399575f0878ae391e6e77a159" }, ] [[package]] name = "torchvision" -version = "0.22.1+cu128" -source = { registry = "https://download.pytorch.org/whl/cu128" } +version = "0.26.0" +source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and sys_platform == 'darwin'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and 
sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 
'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' 
and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' 
and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 
'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 
'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 
's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= 
'3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra 
== 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra 
!= 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 
's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= 
'3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra 
== 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and 
extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and 
sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= 
'3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", ] dependencies = [ - { name = "numpy", marker = "(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "pillow", marker = "(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.7.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "(platform_machine != 'aarch64' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or 
(python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or 
(extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = 
"https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra 
== 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pillow", marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-torch-cu126' or extra == 'extra-16-inference-models-torch-cu130' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ - { url = "https://download-r2.pytorch.org/whl/cu128/torchvision-0.22.1%2Bcu128-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:538f4db667286d939b4eee0a66d31ed21b51186668006b0e0ffe20338ecc7e00" }, - { url = "https://download-r2.pytorch.org/whl/cu128/torchvision-0.22.1%2Bcu128-cp310-cp310-win_amd64.whl", hash = "sha256:ad48ba3c3ffd48027e3a8de42fcea131a53a524ee9416ca4efb22f9ac6b7328d" }, - { url = 
"https://download-r2.pytorch.org/whl/cu128/torchvision-0.22.1%2Bcu128-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:92568ac46b13a8c88b61589800b1b9c4629be091ea7ce080fc6fc622e11e0915" }, - { url = "https://download-r2.pytorch.org/whl/cu128/torchvision-0.22.1%2Bcu128-cp311-cp311-win_amd64.whl", hash = "sha256:85ecd729c947151eccea502853be6efc2c0029dc26e6e5148e04684aed008390" }, - { url = "https://download-r2.pytorch.org/whl/cu128/torchvision-0.22.1%2Bcu128-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f64ef9bb91d71ab35d8384912a19f7419e35928685bc67544d58f45148334373" }, - { url = "https://download-r2.pytorch.org/whl/cu128/torchvision-0.22.1%2Bcu128-cp312-cp312-win_amd64.whl", hash = "sha256:650561ba326d21021243f5e064133dc62dc64d52f79623db5cd76637a9665f96" }, + { url = "https://files.pythonhosted.org/packages/74/b4/cdfee31e0402ea035135462cb0ab496e974d56fab6b4e7a1f0cbccb8cd28/torchvision-0.26.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a06d4772a8e13e772906ed736cc53ec6639e5e60554f8e5fa6ca165aabebc464", size = 1863503, upload-time = "2026-03-23T18:13:01.384Z" }, + { url = "https://files.pythonhosted.org/packages/e4/74/11fee109841e80ad14e5ca2d80bff6b10eb11b7838ff06f35bfeaa9f7251/torchvision-0.26.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:2adfbe438473236191ff077a4a9a0c767436879c89628aa97137e959b0c11a94", size = 7766423, upload-time = "2026-03-23T18:12:56.049Z" }, + { url = "https://files.pythonhosted.org/packages/5e/00/24d8c7845c3f270153fb81395a5135b2778e2538e81d14c6aea5106c689c/torchvision-0.26.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b6f9ad1ecc0eab52647298b379ee9426845f8903703e6127973f8f3d049a798b", size = 7518249, upload-time = "2026-03-23T18:12:51.743Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ed/e53cd7c0da7ae002e5e929c1796ebbe7ec0c700c29f7a0a6696497fb3d8b/torchvision-0.26.0-cp310-cp310-win_amd64.whl", hash = "sha256:f13f12b3791a266de2d599cb8162925261622a037d87fc03132848343cf68f75", size = 3669784, 
upload-time = "2026-03-23T18:12:49.949Z" }, + { url = "https://files.pythonhosted.org/packages/b4/bd/d552a2521bade3295b2c6e7a4a0d1022261cab7ca7011f4e2a330dbb3caa/torchvision-0.26.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:55bd6ad4ae77be01ba67a410b05b51f53b0d0ee45f146eb6a0dfb9007e70ab3c", size = 1863499, upload-time = "2026-03-23T18:12:58.696Z" }, + { url = "https://files.pythonhosted.org/packages/33/bf/21b899792b08cae7a298551c68398a79e333697479ed311b3b067aab4bdc/torchvision-0.26.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:1c55dc8affbcc0eb2060fbabbe996ae9e5839b24bb6419777f17848945a411b1", size = 7767527, upload-time = "2026-03-23T18:12:44.348Z" }, + { url = "https://files.pythonhosted.org/packages/9a/45/57bbf9e216850d065e66dd31a50f57424b607f1d878ab8956e56a1f4e36b/torchvision-0.26.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:fd10b5f994c210f4f6d6761cf686f82d748554adf486cb0979770c3252868c8f", size = 7519925, upload-time = "2026-03-23T18:12:53.283Z" }, + { url = "https://files.pythonhosted.org/packages/10/58/ed8f7754299f3e91d6414b6dc09f62b3fa7c6e5d63dfe48d69ab81498a37/torchvision-0.26.0-cp311-cp311-win_amd64.whl", hash = "sha256:de6424b12887ad884f39a0ee446994ae3cd3b6a00a9cafe1bead85a031132af0", size = 3983834, upload-time = "2026-03-23T18:13:00.224Z" }, + { url = "https://files.pythonhosted.org/packages/ae/e7/56b47cc3b132aea90ccce22bcb8975dec688b002150012acc842846039d0/torchvision-0.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c409e1c3fdebec7a3834465086dbda8bf7680eff79abf7fd2f10c6b59520a7a4", size = 1863502, upload-time = "2026-03-23T18:12:57.326Z" }, + { url = "https://files.pythonhosted.org/packages/f4/ec/5c31c92c08b65662fe9604a4067ae8232582805949f11ddc042cebe818ed/torchvision-0.26.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:406557718e62fdf10f5706e88d8a5ec000f872da913bf629aab9297622585547", size = 7767944, upload-time = "2026-03-23T18:12:42.805Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/d8/cb6ccda1a1f35a6597645818641701207b3e8e13553e75fce5d86bac74b2/torchvision-0.26.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d61a5abb6b42a0c0c311996c2ac4b83a94418a97182c83b055a2a4ae985e05aa", size = 7522205, upload-time = "2026-03-23T18:12:54.654Z" }, + { url = "https://files.pythonhosted.org/packages/1c/a9/c272623a0f735c35f0f6cd6dc74784d4f970e800cf063bb76687895a2ab9/torchvision-0.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:7993c01648e7c61d191b018e84d38fe0825c8fcb2720cd0f37caf7ba14404aa1", size = 4255155, upload-time = "2026-03-23T18:12:32.652Z" }, ] [[package]] name = "torchvision" -version = "0.23.0" -source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" } +version = "0.26.0+cpu" +source = { registry = "https://download.pytorch.org/whl/cpu" } resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin'", "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and sys_platform == 'darwin'", + 
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin'", "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin'", ] dependencies = [ - { name = "numpy", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' 
and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "pillow", marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.jetson-ai-lab.io/jp6/cu126/+simple" }, marker = "(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' 
and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra 
== 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= 
'3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or 
(python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pillow", marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(sys_platform != 'darwin' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ - { url = "https://pypi.jetson-ai-lab.io/jp6/cu126/+f/907/c4c1933789645/torchvision-0.23.0-cp310-cp310-linux_aarch64.whl", hash = "sha256:907c4c1933789645ebb20dd9181d40f8647978e6bd30086ae7b01febb937d2d1" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.26.0%2Bcpu-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:97df9a8595dce256d2e6dd16bbcd1c68dd00eec712e37d4b6ec7985453ddc2aa" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.26.0%2Bcpu-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:967048f44f5bfcd05afed1c4b595cb30b4419fa0e08c296be403c88e82396c30" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.26.0%2Bcpu-cp310-cp310-win_amd64.whl", hash = "sha256:a6e9fac5bddb918b084201b206f0f322693cab6f76400abf47f5e202937ebf4d" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.26.0%2Bcpu-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:c11e55041f6b84a6c4fb28981b901475aa81c38695ccec6ddfcc54c3fa9fac4f" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.26.0%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:7fcc4584e9f2f8914f898a01437f25b16222fb0bfb3fdeba59cb1b0c640b0995" }, + { url = 
"https://download-r2.pytorch.org/whl/cpu/torchvision-0.26.0%2Bcpu-cp311-cp311-win_amd64.whl", hash = "sha256:f0c80af8e2807d52f3d480d9828d550f097ad55589f28aab4d65471b3d636359" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.26.0%2Bcpu-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:17f0b542331fc94230b4214c6d123f038af7330fd81019608c0d2402f3bc3079" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.26.0%2Bcpu-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:cf547dc0975eb40bc3249be4ccbeb736597d2c3ece305b1c4e5b7a5dd7363567" }, + { url = "https://download-r2.pytorch.org/whl/cpu/torchvision-0.26.0%2Bcpu-cp312-cp312-win_amd64.whl", hash = "sha256:52aa8401850a9792e71a8a1e65ac004e2b23622a6b6fd278cd11179efbefc65b" }, ] [[package]] name = "torchvision" -version = "0.23.0" -source = { registry = "https://pypi.org/simple" } +version = "0.26.0+cu128" +source = { registry = "https://download.pytorch.org/whl/cu128" } resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'darwin'", - "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and sys_platform == 'darwin'", - "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and 
sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' 
and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra 
!= 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra 
!= 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 
'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra 
== 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' 
and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' 
and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra 
== 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and 
extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 
'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and 
extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 
's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= 
'3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra 
!= 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'emscripten' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and 
extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine != 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform == 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", ] dependencies = [ - { name = "numpy" }, - { name = "pillow" }, - { name = "torch", version = "2.8.0", source = { registry = "https://pypi.org/simple" } }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') 
or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 
'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "pillow", marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "torch", version = "2.11.0+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "extra == 'extra-16-inference-models-torch-cu128' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/49/5ad5c3ff4920be0adee9eb4339b4fb3b023a0fc55b9ed8dbc73df92946b8/torchvision-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7266871daca00ad46d1c073e55d972179d12a58fa5c9adec9a3db9bbed71284a", size = 1856885, upload-time = "2025-08-06T14:57:55.024Z" }, - { url = 
"https://files.pythonhosted.org/packages/25/44/ddd56d1637bac42a8c5da2c8c440d8a28c431f996dd9790f32dd9a96ca6e/torchvision-0.23.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:31c583ba27426a3a04eca8c05450524105c1564db41be6632f7536ef405a6de2", size = 2394251, upload-time = "2025-08-06T14:58:01.725Z" }, - { url = "https://files.pythonhosted.org/packages/93/f3/3cdf55bbf0f737304d997561c34ab0176222e0496b6743b0feab5995182c/torchvision-0.23.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3932bf67256f2d095ce90a9f826f6033694c818856f4bb26794cf2ce64253e53", size = 8627497, upload-time = "2025-08-06T14:58:09.317Z" }, - { url = "https://files.pythonhosted.org/packages/97/90/02afe57c3ef4284c5cf89d3b7ae203829b3a981f72b93a7dd2a3fd2c83c1/torchvision-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:83ee5bf827d61a8af14620c0a61d8608558638ac9c3bac8adb7b27138e2147d1", size = 1600760, upload-time = "2025-08-06T14:57:56.783Z" }, - { url = "https://files.pythonhosted.org/packages/f0/d7/15d3d7bd8d0239211b21673d1bac7bc345a4ad904a8e25bb3fd8a9cf1fbc/torchvision-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:49aa20e21f0c2bd458c71d7b449776cbd5f16693dd5807195a820612b8a229b7", size = 1856884, upload-time = "2025-08-06T14:58:00.237Z" }, - { url = "https://files.pythonhosted.org/packages/dd/14/7b44fe766b7d11e064c539d92a172fa9689a53b69029e24f2f1f51e7dc56/torchvision-0.23.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:01dc33ee24c79148aee7cdbcf34ae8a3c9da1674a591e781577b716d233b1fa6", size = 2395543, upload-time = "2025-08-06T14:58:04.373Z" }, - { url = "https://files.pythonhosted.org/packages/79/9c/fcb09aff941c8147d9e6aa6c8f67412a05622b0c750bcf796be4c85a58d4/torchvision-0.23.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:35c27941831b653f5101edfe62c03d196c13f32139310519e8228f35eae0e96a", size = 8628388, upload-time = "2025-08-06T14:58:07.802Z" }, - { url = 
"https://files.pythonhosted.org/packages/93/40/3415d890eb357b25a8e0a215d32365a88ecc75a283f75c4e919024b22d97/torchvision-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:09bfde260e7963a15b80c9e442faa9f021c7e7f877ac0a36ca6561b367185013", size = 1600741, upload-time = "2025-08-06T14:57:59.158Z" }, - { url = "https://files.pythonhosted.org/packages/df/1d/0ea0b34bde92a86d42620f29baa6dcbb5c2fc85990316df5cb8f7abb8ea2/torchvision-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e0e2c04a91403e8dd3af9756c6a024a1d9c0ed9c0d592a8314ded8f4fe30d440", size = 1856885, upload-time = "2025-08-06T14:58:06.503Z" }, - { url = "https://files.pythonhosted.org/packages/e2/00/2f6454decc0cd67158c7890364e446aad4b91797087a57a78e72e1a8f8bc/torchvision-0.23.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:6dd7c4d329a0e03157803031bc856220c6155ef08c26d4f5bbac938acecf0948", size = 2396614, upload-time = "2025-08-06T14:58:03.116Z" }, - { url = "https://files.pythonhosted.org/packages/e4/b5/3e580dcbc16f39a324f3dd71b90edbf02a42548ad44d2b4893cc92b1194b/torchvision-0.23.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:4e7d31c43bc7cbecbb1a5652ac0106b436aa66e26437585fc2c4b2cf04d6014c", size = 8627108, upload-time = "2025-08-06T14:58:12.956Z" }, - { url = "https://files.pythonhosted.org/packages/82/c1/c2fe6d61e110a8d0de2f94276899a2324a8f1e6aee559eb6b4629ab27466/torchvision-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:a2e45272abe7b8bf0d06c405e78521b5757be1bd0ed7e5cd78120f7fdd4cbf35", size = 1600723, upload-time = "2025-08-06T14:57:57.986Z" }, + { url = "https://download-r2.pytorch.org/whl/cu128/torchvision-0.26.0%2Bcu128-cp310-cp310-manylinux_2_28_aarch64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cu128/torchvision-0.26.0%2Bcu128-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f44bfc61b9be80bcf52a762d34da363cea3125d10c01f37e271583803c7bb97b" }, + { url = "https://download-r2.pytorch.org/whl/cu128/torchvision-0.26.0%2Bcu128-cp310-cp310-win_amd64.whl" }, + { 
url = "https://download-r2.pytorch.org/whl/cu128/torchvision-0.26.0%2Bcu128-cp311-cp311-manylinux_2_28_aarch64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cu128/torchvision-0.26.0%2Bcu128-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8f2629d056570c929b0a1d5473d9cb0320b90bda1764bda353553a72cc6b2069" }, + { url = "https://download-r2.pytorch.org/whl/cu128/torchvision-0.26.0%2Bcu128-cp311-cp311-win_amd64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cu128/torchvision-0.26.0%2Bcu128-cp312-cp312-manylinux_2_28_aarch64.whl" }, + { url = "https://download-r2.pytorch.org/whl/cu128/torchvision-0.26.0%2Bcu128-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ccf26b4b659cfce6f2208cb8326071d51c70219a34856dfdf468d1e19af52c0d" }, + { url = "https://download-r2.pytorch.org/whl/cu128/torchvision-0.26.0%2Bcu128-cp312-cp312-win_amd64.whl" }, ] [[package]] name = "tornado" -version = "6.5.4" +version = "6.5.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/37/1d/0a336abf618272d53f62ebe274f712e213f5a03c0b2339575430b8362ef2/tornado-6.5.4.tar.gz", hash = "sha256:a22fa9047405d03260b483980635f0b041989d8bcc9a313f8fe18b411d84b1d7", size = 513632, upload-time = "2025-12-15T19:21:03.836Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/f1/3173dfa4a18db4a9b03e5d55325559dab51ee653763bb8745a75af491286/tornado-6.5.5.tar.gz", hash = "sha256:192b8f3ea91bd7f1f50c06955416ed76c6b72f96779b962f07f911b91e8d30e9", size = 516006, upload-time = "2026-03-10T21:31:02.067Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/a9/e94a9d5224107d7ce3cc1fab8d5dc97f5ea351ccc6322ee4fb661da94e35/tornado-6.5.4-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d6241c1a16b1c9e4cc28148b1cda97dd1c6cb4fb7068ac1bedc610768dff0ba9", size = 443909, upload-time = "2025-12-15T19:20:48.382Z" }, - { url = 
"https://files.pythonhosted.org/packages/db/7e/f7b8d8c4453f305a51f80dbb49014257bb7d28ccb4bbb8dd328ea995ecad/tornado-6.5.4-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2d50f63dda1d2cac3ae1fa23d254e16b5e38153758470e9956cbc3d813d40843", size = 442163, upload-time = "2025-12-15T19:20:49.791Z" }, - { url = "https://files.pythonhosted.org/packages/ba/b5/206f82d51e1bfa940ba366a8d2f83904b15942c45a78dd978b599870ab44/tornado-6.5.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1cf66105dc6acb5af613c054955b8137e34a03698aa53272dbda4afe252be17", size = 445746, upload-time = "2025-12-15T19:20:51.491Z" }, - { url = "https://files.pythonhosted.org/packages/8e/9d/1a3338e0bd30ada6ad4356c13a0a6c35fbc859063fa7eddb309183364ac1/tornado-6.5.4-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50ff0a58b0dc97939d29da29cd624da010e7f804746621c78d14b80238669335", size = 445083, upload-time = "2025-12-15T19:20:52.778Z" }, - { url = "https://files.pythonhosted.org/packages/50/d4/e51d52047e7eb9a582da59f32125d17c0482d065afd5d3bc435ff2120dc5/tornado-6.5.4-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5fb5e04efa54cf0baabdd10061eb4148e0be137166146fff835745f59ab9f7f", size = 445315, upload-time = "2025-12-15T19:20:53.996Z" }, - { url = "https://files.pythonhosted.org/packages/27/07/2273972f69ca63dbc139694a3fc4684edec3ea3f9efabf77ed32483b875c/tornado-6.5.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9c86b1643b33a4cd415f8d0fe53045f913bf07b4a3ef646b735a6a86047dda84", size = 446003, upload-time = "2025-12-15T19:20:56.101Z" }, - { url = "https://files.pythonhosted.org/packages/d1/83/41c52e47502bf7260044413b6770d1a48dda2f0246f95ee1384a3cd9c44a/tornado-6.5.4-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:6eb82872335a53dd063a4f10917b3efd28270b56a33db69009606a0312660a6f", size = 445412, upload-time = "2025-12-15T19:20:57.398Z" }, - { url = 
"https://files.pythonhosted.org/packages/10/c7/bc96917f06cbee182d44735d4ecde9c432e25b84f4c2086143013e7b9e52/tornado-6.5.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6076d5dda368c9328ff41ab5d9dd3608e695e8225d1cd0fd1e006f05da3635a8", size = 445392, upload-time = "2025-12-15T19:20:58.692Z" }, - { url = "https://files.pythonhosted.org/packages/0c/1a/d7592328d037d36f2d2462f4bc1fbb383eec9278bc786c1b111cbbd44cfa/tornado-6.5.4-cp39-abi3-win32.whl", hash = "sha256:1768110f2411d5cd281bac0a090f707223ce77fd110424361092859e089b38d1", size = 446481, upload-time = "2025-12-15T19:21:00.008Z" }, - { url = "https://files.pythonhosted.org/packages/d6/6d/c69be695a0a64fd37a97db12355a035a6d90f79067a3cf936ec2b1dc38cd/tornado-6.5.4-cp39-abi3-win_amd64.whl", hash = "sha256:fa07d31e0cd85c60713f2b995da613588aa03e1303d75705dca6af8babc18ddc", size = 446886, upload-time = "2025-12-15T19:21:01.287Z" }, - { url = "https://files.pythonhosted.org/packages/50/49/8dc3fd90902f70084bd2cd059d576ddb4f8bb44c2c7c0e33a11422acb17e/tornado-6.5.4-cp39-abi3-win_arm64.whl", hash = "sha256:053e6e16701eb6cbe641f308f4c1a9541f91b6261991160391bfc342e8a551a1", size = 445910, upload-time = "2025-12-15T19:21:02.571Z" }, + { url = "https://files.pythonhosted.org/packages/59/8c/77f5097695f4dd8255ecbd08b2a1ed8ba8b953d337804dd7080f199e12bf/tornado-6.5.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:487dc9cc380e29f58c7ab88f9e27cdeef04b2140862e5076a66fb6bb68bb1bfa", size = 445983, upload-time = "2026-03-10T21:30:44.28Z" }, + { url = "https://files.pythonhosted.org/packages/ab/5e/7625b76cd10f98f1516c36ce0346de62061156352353ef2da44e5c21523c/tornado-6.5.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:65a7f1d46d4bb41df1ac99f5fcb685fb25c7e61613742d5108b010975a9a6521", size = 444246, upload-time = "2026-03-10T21:30:46.571Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/04/7b5705d5b3c0fab088f434f9c83edac1573830ca49ccf29fb83bf7178eec/tornado-6.5.5-cp39-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e74c92e8e65086b338fd56333fb9a68b9f6f2fe7ad532645a290a464bcf46be5", size = 447229, upload-time = "2026-03-10T21:30:48.273Z" }, + { url = "https://files.pythonhosted.org/packages/34/01/74e034a30ef59afb4097ef8659515e96a39d910b712a89af76f5e4e1f93c/tornado-6.5.5-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:435319e9e340276428bbdb4e7fa732c2d399386d1de5686cb331ec8eee754f07", size = 448192, upload-time = "2026-03-10T21:30:51.22Z" }, + { url = "https://files.pythonhosted.org/packages/be/00/fe9e02c5a96429fce1a1d15a517f5d8444f9c412e0bb9eadfbe3b0fc55bf/tornado-6.5.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3f54aa540bdbfee7b9eb268ead60e7d199de5021facd276819c193c0fb28ea4e", size = 448039, upload-time = "2026-03-10T21:30:53.52Z" }, + { url = "https://files.pythonhosted.org/packages/82/9e/656ee4cec0398b1d18d0f1eb6372c41c6b889722641d84948351ae19556d/tornado-6.5.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:36abed1754faeb80fbd6e64db2758091e1320f6bba74a4cf8c09cd18ccce8aca", size = 447445, upload-time = "2026-03-10T21:30:55.541Z" }, + { url = "https://files.pythonhosted.org/packages/5a/76/4921c00511f88af86a33de770d64141170f1cfd9c00311aea689949e274e/tornado-6.5.5-cp39-abi3-win32.whl", hash = "sha256:dd3eafaaeec1c7f2f8fdcd5f964e8907ad788fe8a5a32c4426fbbdda621223b7", size = 448582, upload-time = "2026-03-10T21:30:57.142Z" }, + { url = "https://files.pythonhosted.org/packages/2c/23/f6c6112a04d28eed765e374435fb1a9198f73e1ec4b4024184f21faeb1ad/tornado-6.5.5-cp39-abi3-win_amd64.whl", hash = "sha256:6443a794ba961a9f619b1ae926a2e900ac20c34483eea67be4ed8f1e58d3ef7b", size = 448990, upload-time = "2026-03-10T21:30:58.857Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/c8/876602cbc96469911f0939f703453c1157b0c826ecb05bdd32e023397d4e/tornado-6.5.5-cp39-abi3-win_arm64.whl", hash = "sha256:2c9a876e094109333f888539ddb2de4361743e5d21eece20688e3e351e4990a6", size = 448016, upload-time = "2026-03-10T21:31:00.43Z" }, ] [[package]] name = "tqdm" -version = "4.67.1" +version = "4.67.3" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "colorama", marker = "sys_platform == 'win32' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } +sdist = { url = "https://files.pythonhosted.org/packages/09/a9/6ba95a270c6f1fbcd8dac228323f2777d886cb206987444e4bce66338dd4/tqdm-4.67.3.tar.gz", hash = "sha256:7d825f03f89244ef73f1d4ce193cb1774a8179fd96f31d7e1dcde62092b960bb", size = 169598, upload-time = "2026-02-03T17:35:53.048Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl", hash = "sha256:ee1e4c0e59148062281c49d80b25b67771a127c85fc9676d3be5f243206826bf", size = 78374, upload-time = "2026-02-03T17:35:50.982Z" }, ] [[package]] @@ -5091,7 +16311,9 @@ version = "5.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub" }, - { name = "numpy" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'extra-16-inference-models-onnx-jp6-cu126' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' 
and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' 
and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version < '3.11' and extra != 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 
'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "numpy", version = "2.4.4", source = { 
registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and extra == 'extra-16-inference-models-falcon-perception') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra 
== 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 
'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra == 'extra-16-inference-models-onnx-cu12' and 
extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (python_full_version >= '3.11' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and 
extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') 
or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and 
extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, { name = "packaging" }, { name = "pyyaml" }, { name = "regex" }, @@ -5110,9 +16332,9 @@ name = "triton" version = "3.2.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", ] wheels = [ { url = "https://files.pythonhosted.org/packages/01/65/3ffa90e158a2c82f0716eee8d26a725d241549b7d7aaf7e4f44ac03ebd89/triton-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3e54983cd51875855da7c68ec05c05cf8bb08df361b1d5b69e05e40b0c9bd62", size = 253090354, upload-time = "2025-01-22T19:12:21.872Z" }, @@ -5125,15 
+16347,12 @@ name = "triton" version = "3.3.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux')", ] dependencies = [ - { name = "setuptools", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 
'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or 
(sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' 
and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "setuptools", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or 
(platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (platform_machine != 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and 
extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 
'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 
'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'darwin' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 
'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and 
extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (sys_platform == 'linux' and extra != 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') 
or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 
'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] wheels = [ { url = 
"https://files.pythonhosted.org/packages/8d/a9/549e51e9b1b2c9b854fd761a1d23df0ba2fbc60bd0c13b489ffa518cfcb7/triton-3.3.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b74db445b1c562844d3cfad6e9679c72e93fdfb1a90a24052b03bb5c49d1242e", size = 155600257, upload-time = "2025-05-29T23:39:36.085Z" }, @@ -5141,6 +16360,114 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/24/5f/950fb373bf9c01ad4eb5a8cd5eaf32cdf9e238c02f9293557a2129b9c4ac/triton-3.3.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9999e83aba21e1a78c1f36f21bce621b77bcaa530277a50484a7cb4a822f6e43", size = 155669138, upload-time = "2025-05-29T23:39:51.771Z" }, ] +[[package]] +name = "triton" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= 
'3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra 
!= 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' 
and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and 
extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 
'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and 
sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 
'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 
'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' 
and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 
'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 
'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and 
extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + 
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 
'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' 
and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 
'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' 
and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and 
platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 
'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and 
extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 
'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 
'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or 
(python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and platform_machine == 's390x' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'win32' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra 
!= 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and 
extra == 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 
'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version == '3.11.*' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version == '3.11.*' and 
platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_machine != 's390x' and sys_platform == 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra 
!= 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126') or (python_full_version < '3.11' and platform_machine != 's390x' and sys_platform != 'darwin' and sys_platform != 'linux' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126')", + "python_full_version < '3.11' and platform_machine == 's390x' and sys_platform != 'darwin' and extra != 'extra-16-inference-models-falcon-perception' and extra != 'extra-16-inference-models-onnx-cpu' and extra != 'extra-16-inference-models-onnx-cu118' and extra != 'extra-16-inference-models-onnx-cu12' and extra != 'extra-16-inference-models-onnx-jp6-cu126' and extra != 'extra-16-inference-models-torch-cpu' and extra != 'extra-16-inference-models-torch-cu118' and extra != 'extra-16-inference-models-torch-cu124' and extra != 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128' and extra != 'extra-16-inference-models-torch-cu130' and extra != 'extra-16-inference-models-torch-jp6-cu126'", +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/ba/b1b04f4b291a3205d95ebd24465de0e5bf010a2df27a4e58a9b5f039d8f2/triton-3.6.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:6c723cfb12f6842a0ae94ac307dba7e7a44741d720a40cf0e270ed4a4e3be781", size = 175972180, upload-time = "2026-01-20T16:15:53.664Z" }, + { url = "https://files.pythonhosted.org/packages/8c/f7/f1c9d3424ab199ac53c2da567b859bcddbb9c9e7154805119f8bd95ec36f/triton-3.6.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a6550fae429e0667e397e5de64b332d1e5695b73650ee75a6146e2e902770bea", size = 188105201, upload-time = "2026-01-20T16:00:29.272Z" }, + { url = "https://files.pythonhosted.org/packages/0f/2c/96f92f3c60387e14cc45aed49487f3486f89ea27106c1b1376913c62abe4/triton-3.6.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49df5ef37379c0c2b5c0012286f80174fcf0e073e5ade1ca9a86c36814553651", size = 176081190, upload-time = "2026-01-20T16:16:00.523Z" }, + { url = "https://files.pythonhosted.org/packages/e0/12/b05ba554d2c623bffa59922b94b0775673de251f468a9609bc9e45de95e9/triton-3.6.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8e323d608e3a9bfcc2d9efcc90ceefb764a82b99dea12a86d643c72539ad5d3", size = 188214640, upload-time = "2026-01-20T16:00:35.869Z" }, + { url = "https://files.pythonhosted.org/packages/17/5d/08201db32823bdf77a0e2b9039540080b2e5c23a20706ddba942924ebcd6/triton-3.6.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:374f52c11a711fd062b4bfbb201fd9ac0a5febd28a96fb41b4a0f51dde3157f4", size = 176128243, upload-time = "2026-01-20T16:16:07.857Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a8/cdf8b3e4c98132f965f88c2313a4b493266832ad47fb52f23d14d4f86bb5/triton-3.6.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:74caf5e34b66d9f3a429af689c1c7128daba1d8208df60e81106b115c00d6fca", size = 188266850, upload-time = "2026-01-20T16:00:43.041Z" }, +] + +[[package]] +name = "typeguard" +version = "4.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { 
url = "https://files.pythonhosted.org/packages/2b/e8/66e25efcc18542d58706ce4e50415710593721aae26e794ab1dec34fb66f/typeguard-4.5.1.tar.gz", hash = "sha256:f6f8ecbbc819c9bc749983cc67c02391e16a9b43b8b27f15dc70ed7c4a007274", size = 80121, upload-time = "2026-02-19T16:09:03.392Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl", hash = "sha256:44d2bf329d49a244110a090b55f5f91aa82d9a9834ebfd30bcc73651e4a8cc40", size = 36745, upload-time = "2026-02-19T16:09:01.6Z" }, +] + [[package]] name = "typer" version = "0.24.1" @@ -5170,41 +16497,55 @@ wheels = [ [[package]] name = "typing-extensions" -version = "4.14.0" +version = "4.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d1/bc/51647cd02527e87d05cb083ccc402f93e441606ff1f01739a62c8ad09ba5/typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4", size = 107423, upload-time = "2025-06-02T14:52:11.399Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/69/e0/552843e0d356fbb5256d21449fa957fa4eff3bbc135a74a691ee70c7c5da/typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af", size = 43839, upload-time = "2025-06-02T14:52:10.026Z" }, + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = 
"2025-08-25T13:49:24.86Z" }, ] [[package]] name = "typing-inspection" -version = "0.4.1" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "tyro" +version = "1.0.12" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "docstring-parser" }, + { name = "typeguard" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/3e/96397637152a9ce368a575e88e1782a9fe4640a3950e26805ec9cca569c1/tyro-1.0.12.tar.gz", hash = "sha256:b49177ab9668e0c9255025d3936b6647cc07537ca04a4ed34362099907508ba2", size = 489159, upload-time = "2026-04-02T09:48:58.294Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/8e/fa21200c7fcebdbc5a1f7ebe2d437d69b0723bfaeea843c846b99248b2ba/tyro-1.0.12-py3-none-any.whl", hash = "sha256:d9c7b65e05c1b6760204bcb4d0b1ef76f235b16f1423d056a0d36814c2df902d", size = 185245, upload-time = "2026-04-02T09:48:56.808Z" }, ] [[package]] name = "tzdata" -version = "2025.3" +version = "2026.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772, upload-time = "2025-12-13T17:45:35.667Z" } +sdist = { url = "https://files.pythonhosted.org/packages/19/f5/cd531b2d15a671a40c0f66cf06bc3570a12cd56eef98960068ebbad1bf5a/tzdata-2026.1.tar.gz", hash = "sha256:67658a1903c75917309e753fdc349ac0efd8c27db7a0cb406a25be4840f87f98", size = 197639, upload-time = "2026-04-03T11:25:22.002Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" }, + { url = "https://files.pythonhosted.org/packages/b0/70/d460bd685a170790ec89317e9bd33047988e4bce507b831f5db771e142de/tzdata-2026.1-py2.py3-none-any.whl", hash = "sha256:4b1d2be7ac37ceafd7327b961aa3a54e467efbdb563a23655fbfe0d39cfc42a9", size = 348952, upload-time = "2026-04-03T11:25:20.313Z" }, ] [[package]] name = "urllib3" -version = "2.5.0" +version = "2.6.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +sdist = { 
url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, ] [[package]] @@ -5256,11 +16597,11 @@ wheels = [ [[package]] name = "wcwidth" -version = "0.2.13" +version = "0.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" } +sdist = { url = "https://files.pythonhosted.org/packages/35/a2/8e3becb46433538a38726c948d3399905a4c7cabd0df578ede5dc51f0ec2/wcwidth-0.6.0.tar.gz", hash = "sha256:cdc4e4262d6ef9a1a57e018384cbeb1208d8abbc64176027e2c2455c81313159", size = 159684, upload-time = "2026-02-06T19:19:40.919Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/5a/199c59e0a824a3db2b89c5d2dade7ab5f9624dbf6448dc291b46d5ec94d3/wcwidth-0.6.0-py3-none-any.whl", hash = "sha256:1a3a1e510b553315f8e146c54764f4fb6264ffad731b3d78088cdb1478ffbdad", size = 94189, upload-time = "2026-02-06T19:19:39.646Z" }, ] [[package]] @@ -5272,13 +16613,71 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" }, ] +[[package]] +name = "xxhash" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/ee/f9f1d656ad168681bb0f6b092372c1e533c4416b8069b1896a175c46e484/xxhash-3.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:87ff03d7e35c61435976554477a7f4cd1704c3596a89a8300d5ce7fc83874a71", size = 32845, upload-time = "2025-10-02T14:33:51.573Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b1/93508d9460b292c74a09b83d16750c52a0ead89c51eea9951cb97a60d959/xxhash-3.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f572dfd3d0e2eb1a57511831cf6341242f5a9f8298a45862d085f5b93394a27d", size = 30807, upload-time = "2025-10-02T14:33:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/07/55/28c93a3662f2d200c70704efe74aab9640e824f8ce330d8d3943bf7c9b3c/xxhash-3.6.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:89952ea539566b9fed2bbd94e589672794b4286f342254fad28b149f9615fef8", size = 193786, upload-time = "2025-10-02T14:33:54.272Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/96/fec0be9bb4b8f5d9c57d76380a366f31a1781fb802f76fc7cda6c84893c7/xxhash-3.6.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e6f2ffb07a50b52465a1032c3cf1f4a5683f944acaca8a134a2f23674c2058", size = 212830, upload-time = "2025-10-02T14:33:55.706Z" }, + { url = "https://files.pythonhosted.org/packages/c4/a0/c706845ba77b9611f81fd2e93fad9859346b026e8445e76f8c6fd057cc6d/xxhash-3.6.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b5b848ad6c16d308c3ac7ad4ba6bede80ed5df2ba8ed382f8932df63158dd4b2", size = 211606, upload-time = "2025-10-02T14:33:57.133Z" }, + { url = "https://files.pythonhosted.org/packages/67/1e/164126a2999e5045f04a69257eea946c0dc3e86541b400d4385d646b53d7/xxhash-3.6.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a034590a727b44dd8ac5914236a7b8504144447a9682586c3327e935f33ec8cc", size = 444872, upload-time = "2025-10-02T14:33:58.446Z" }, + { url = "https://files.pythonhosted.org/packages/2d/4b/55ab404c56cd70a2cf5ecfe484838865d0fea5627365c6c8ca156bd09c8f/xxhash-3.6.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8a8f1972e75ebdd161d7896743122834fe87378160c20e97f8b09166213bf8cc", size = 193217, upload-time = "2025-10-02T14:33:59.724Z" }, + { url = "https://files.pythonhosted.org/packages/45/e6/52abf06bac316db33aa269091ae7311bd53cfc6f4b120ae77bac1b348091/xxhash-3.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ee34327b187f002a596d7b167ebc59a1b729e963ce645964bbc050d2f1b73d07", size = 210139, upload-time = "2025-10-02T14:34:02.041Z" }, + { url = "https://files.pythonhosted.org/packages/34/37/db94d490b8691236d356bc249c08819cbcef9273a1a30acf1254ff9ce157/xxhash-3.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:339f518c3c7a850dd033ab416ea25a692759dc7478a71131fe8869010d2b75e4", size = 197669, 
upload-time = "2025-10-02T14:34:03.664Z" }, + { url = "https://files.pythonhosted.org/packages/b7/36/c4f219ef4a17a4f7a64ed3569bc2b5a9c8311abdb22249ac96093625b1a4/xxhash-3.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:bf48889c9630542d4709192578aebbd836177c9f7a4a2778a7d6340107c65f06", size = 210018, upload-time = "2025-10-02T14:34:05.325Z" }, + { url = "https://files.pythonhosted.org/packages/fd/06/bfac889a374fc2fc439a69223d1750eed2e18a7db8514737ab630534fa08/xxhash-3.6.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:5576b002a56207f640636056b4160a378fe36a58db73ae5c27a7ec8db35f71d4", size = 413058, upload-time = "2025-10-02T14:34:06.925Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d1/555d8447e0dd32ad0930a249a522bb2e289f0d08b6b16204cfa42c1f5a0c/xxhash-3.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af1f3278bd02814d6dedc5dec397993b549d6f16c19379721e5a1d31e132c49b", size = 190628, upload-time = "2025-10-02T14:34:08.669Z" }, + { url = "https://files.pythonhosted.org/packages/d1/15/8751330b5186cedc4ed4b597989882ea05e0408b53fa47bcb46a6125bfc6/xxhash-3.6.0-cp310-cp310-win32.whl", hash = "sha256:aed058764db109dc9052720da65fafe84873b05eb8b07e5e653597951af57c3b", size = 30577, upload-time = "2025-10-02T14:34:10.234Z" }, + { url = "https://files.pythonhosted.org/packages/bb/cc/53f87e8b5871a6eb2ff7e89c48c66093bda2be52315a8161ddc54ea550c4/xxhash-3.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:e82da5670f2d0d98950317f82a0e4a0197150ff19a6df2ba40399c2a3b9ae5fb", size = 31487, upload-time = "2025-10-02T14:34:11.618Z" }, + { url = "https://files.pythonhosted.org/packages/9f/00/60f9ea3bb697667a14314d7269956f58bf56bb73864f8f8d52a3c2535e9a/xxhash-3.6.0-cp310-cp310-win_arm64.whl", hash = "sha256:4a082ffff8c6ac07707fb6b671caf7c6e020c75226c561830b73d862060f281d", size = 27863, upload-time = "2025-10-02T14:34:12.619Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/d4/cc2f0400e9154df4b9964249da78ebd72f318e35ccc425e9f403c392f22a/xxhash-3.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b47bbd8cf2d72797f3c2772eaaac0ded3d3af26481a26d7d7d41dc2d3c46b04a", size = 32844, upload-time = "2025-10-02T14:34:14.037Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ec/1cc11cd13e26ea8bc3cb4af4eaadd8d46d5014aebb67be3f71fb0b68802a/xxhash-3.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2b6821e94346f96db75abaa6e255706fb06ebd530899ed76d32cd99f20dc52fa", size = 30809, upload-time = "2025-10-02T14:34:15.484Z" }, + { url = "https://files.pythonhosted.org/packages/04/5f/19fe357ea348d98ca22f456f75a30ac0916b51c753e1f8b2e0e6fb884cce/xxhash-3.6.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d0a9751f71a1a65ce3584e9cae4467651c7e70c9d31017fa57574583a4540248", size = 194665, upload-time = "2025-10-02T14:34:16.541Z" }, + { url = "https://files.pythonhosted.org/packages/90/3b/d1f1a8f5442a5fd8beedae110c5af7604dc37349a8e16519c13c19a9a2de/xxhash-3.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b29ee68625ab37b04c0b40c3fafdf24d2f75ccd778333cfb698f65f6c463f62", size = 213550, upload-time = "2025-10-02T14:34:17.878Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ef/3a9b05eb527457d5db13a135a2ae1a26c80fecd624d20f3e8dcc4cb170f3/xxhash-3.6.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6812c25fe0d6c36a46ccb002f40f27ac903bf18af9f6dd8f9669cb4d176ab18f", size = 212384, upload-time = "2025-10-02T14:34:19.182Z" }, + { url = "https://files.pythonhosted.org/packages/0f/18/ccc194ee698c6c623acbf0f8c2969811a8a4b6185af5e824cd27b9e4fd3e/xxhash-3.6.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4ccbff013972390b51a18ef1255ef5ac125c92dc9143b2d1909f59abc765540e", size = 445749, upload-time = 
"2025-10-02T14:34:20.659Z" }, + { url = "https://files.pythonhosted.org/packages/a5/86/cf2c0321dc3940a7aa73076f4fd677a0fb3e405cb297ead7d864fd90847e/xxhash-3.6.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:297b7fbf86c82c550e12e8fb71968b3f033d27b874276ba3624ea868c11165a8", size = 193880, upload-time = "2025-10-02T14:34:22.431Z" }, + { url = "https://files.pythonhosted.org/packages/82/fb/96213c8560e6f948a1ecc9a7613f8032b19ee45f747f4fca4eb31bb6d6ed/xxhash-3.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dea26ae1eb293db089798d3973a5fc928a18fdd97cc8801226fae705b02b14b0", size = 210912, upload-time = "2025-10-02T14:34:23.937Z" }, + { url = "https://files.pythonhosted.org/packages/40/aa/4395e669b0606a096d6788f40dbdf2b819d6773aa290c19e6e83cbfc312f/xxhash-3.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7a0b169aafb98f4284f73635a8e93f0735f9cbde17bd5ec332480484241aaa77", size = 198654, upload-time = "2025-10-02T14:34:25.644Z" }, + { url = "https://files.pythonhosted.org/packages/67/74/b044fcd6b3d89e9b1b665924d85d3f400636c23590226feb1eb09e1176ce/xxhash-3.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:08d45aef063a4531b785cd72de4887766d01dc8f362a515693df349fdb825e0c", size = 210867, upload-time = "2025-10-02T14:34:27.203Z" }, + { url = "https://files.pythonhosted.org/packages/bc/fd/3ce73bf753b08cb19daee1eb14aa0d7fe331f8da9c02dd95316ddfe5275e/xxhash-3.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:929142361a48ee07f09121fe9e96a84950e8d4df3bb298ca5d88061969f34d7b", size = 414012, upload-time = "2025-10-02T14:34:28.409Z" }, + { url = "https://files.pythonhosted.org/packages/ba/b3/5a4241309217c5c876f156b10778f3ab3af7ba7e3259e6d5f5c7d0129eb2/xxhash-3.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:51312c768403d8540487dbbfb557454cfc55589bbde6424456951f7fcd4facb3", size = 191409, upload-time = "2025-10-02T14:34:29.696Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/01/99bfbc15fb9abb9a72b088c1d95219fc4782b7d01fc835bd5744d66dd0b8/xxhash-3.6.0-cp311-cp311-win32.whl", hash = "sha256:d1927a69feddc24c987b337ce81ac15c4720955b667fe9b588e02254b80446fd", size = 30574, upload-time = "2025-10-02T14:34:31.028Z" }, + { url = "https://files.pythonhosted.org/packages/65/79/9d24d7f53819fe301b231044ea362ce64e86c74f6e8c8e51320de248b3e5/xxhash-3.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:26734cdc2d4ffe449b41d186bbeac416f704a482ed835d375a5c0cb02bc63fef", size = 31481, upload-time = "2025-10-02T14:34:32.062Z" }, + { url = "https://files.pythonhosted.org/packages/30/4e/15cd0e3e8772071344eab2961ce83f6e485111fed8beb491a3f1ce100270/xxhash-3.6.0-cp311-cp311-win_arm64.whl", hash = "sha256:d72f67ef8bf36e05f5b6c65e8524f265bd61071471cd4cf1d36743ebeeeb06b7", size = 27861, upload-time = "2025-10-02T14:34:33.555Z" }, + { url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744, upload-time = "2025-10-02T14:34:34.622Z" }, + { url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816, upload-time = "2025-10-02T14:34:36.043Z" }, + { url = "https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035, upload-time = "2025-10-02T14:34:37.354Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/ed/6224ba353690d73af7a3f1c7cdb1fc1b002e38f783cb991ae338e1eb3d79/xxhash-3.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93f107c673bccf0d592cdba077dedaf52fe7f42dcd7676eba1f6d6f0c3efffd2", size = 212914, upload-time = "2025-10-02T14:34:38.6Z" }, + { url = "https://files.pythonhosted.org/packages/38/86/fb6b6130d8dd6b8942cc17ab4d90e223653a89aa32ad2776f8af7064ed13/xxhash-3.6.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aa5ee3444c25b69813663c9f8067dcfaa2e126dc55e8dddf40f4d1c25d7effa", size = 212163, upload-time = "2025-10-02T14:34:39.872Z" }, + { url = "https://files.pythonhosted.org/packages/ee/dc/e84875682b0593e884ad73b2d40767b5790d417bde603cceb6878901d647/xxhash-3.6.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7f99123f0e1194fa59cc69ad46dbae2e07becec5df50a0509a808f90a0f03f0", size = 445411, upload-time = "2025-10-02T14:34:41.569Z" }, + { url = "https://files.pythonhosted.org/packages/11/4f/426f91b96701ec2f37bb2b8cec664eff4f658a11f3fa9d94f0a887ea6d2b/xxhash-3.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49e03e6fe2cac4a1bc64952dd250cf0dbc5ef4ebb7b8d96bce82e2de163c82a2", size = 193883, upload-time = "2025-10-02T14:34:43.249Z" }, + { url = "https://files.pythonhosted.org/packages/53/5a/ddbb83eee8e28b778eacfc5a85c969673e4023cdeedcfcef61f36731610b/xxhash-3.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bd17fede52a17a4f9a7bc4472a5867cb0b160deeb431795c0e4abe158bc784e9", size = 210392, upload-time = "2025-10-02T14:34:45.042Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/ff69efd07c8c074ccdf0a4f36fcdd3d27363665bcdf4ba399abebe643465/xxhash-3.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6fb5f5476bef678f69db04f2bd1efbed3030d2aba305b0fc1773645f187d6a4e", size = 197898, upload-time 
= "2025-10-02T14:34:46.302Z" }, + { url = "https://files.pythonhosted.org/packages/58/ca/faa05ac19b3b622c7c9317ac3e23954187516298a091eb02c976d0d3dd45/xxhash-3.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:843b52f6d88071f87eba1631b684fcb4b2068cd2180a0224122fe4ef011a9374", size = 210655, upload-time = "2025-10-02T14:34:47.571Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7a/06aa7482345480cc0cb597f5c875b11a82c3953f534394f620b0be2f700c/xxhash-3.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7d14a6cfaf03b1b6f5f9790f76880601ccc7896aff7ab9cd8978a939c1eb7e0d", size = 414001, upload-time = "2025-10-02T14:34:49.273Z" }, + { url = "https://files.pythonhosted.org/packages/23/07/63ffb386cd47029aa2916b3d2f454e6cc5b9f5c5ada3790377d5430084e7/xxhash-3.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:418daf3db71e1413cfe211c2f9a528456936645c17f46b5204705581a45390ae", size = 191431, upload-time = "2025-10-02T14:34:50.798Z" }, + { url = "https://files.pythonhosted.org/packages/0f/93/14fde614cadb4ddf5e7cebf8918b7e8fac5ae7861c1875964f17e678205c/xxhash-3.6.0-cp312-cp312-win32.whl", hash = "sha256:50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb", size = 30617, upload-time = "2025-10-02T14:34:51.954Z" }, + { url = "https://files.pythonhosted.org/packages/13/5d/0d125536cbe7565a83d06e43783389ecae0c0f2ed037b48ede185de477c0/xxhash-3.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c", size = 31534, upload-time = "2025-10-02T14:34:53.276Z" }, + { url = "https://files.pythonhosted.org/packages/54/85/6ec269b0952ec7e36ba019125982cf11d91256a778c7c3f98a4c5043d283/xxhash-3.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829", size = 27876, upload-time = "2025-10-02T14:34:54.371Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/1e/8aec23647a34a249f62e2398c42955acd9b4c6ed5cf08cbea94dc46f78d2/xxhash-3.6.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0f7b7e2ec26c1666ad5fc9dbfa426a6a3367ceaf79db5dd76264659d509d73b0", size = 30662, upload-time = "2025-10-02T14:37:01.743Z" }, + { url = "https://files.pythonhosted.org/packages/b8/0b/b14510b38ba91caf43006209db846a696ceea6a847a0c9ba0a5b1adc53d6/xxhash-3.6.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5dc1e14d14fa0f5789ec29a7062004b5933964bb9b02aae6622b8f530dc40296", size = 41056, upload-time = "2025-10-02T14:37:02.879Z" }, + { url = "https://files.pythonhosted.org/packages/50/55/15a7b8a56590e66ccd374bbfa3f9ffc45b810886c8c3b614e3f90bd2367c/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:881b47fc47e051b37d94d13e7455131054b56749b91b508b0907eb07900d1c13", size = 36251, upload-time = "2025-10-02T14:37:04.44Z" }, + { url = "https://files.pythonhosted.org/packages/62/b2/5ac99a041a29e58e95f907876b04f7067a0242cb85b5f39e726153981503/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6dc31591899f5e5666f04cc2e529e69b4072827085c1ef15294d91a004bc1bd", size = 32481, upload-time = "2025-10-02T14:37:05.869Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d9/8d95e906764a386a3d3b596f3c68bb63687dfca806373509f51ce8eea81f/xxhash-3.6.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:15e0dac10eb9309508bfc41f7f9deaa7755c69e35af835db9cb10751adebc35d", size = 31565, upload-time = "2025-10-02T14:37:06.966Z" }, +] + [[package]] name = "yapf" version = "0.43.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "platformdirs" }, - { name = "tomli", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or 
(extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, + { name = "tomli", marker = "python_full_version < '3.11' or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cpu') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-falcon-perception' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu118') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 'extra-16-inference-models-onnx-cpu' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-cu12') or (extra == 
'extra-16-inference-models-onnx-cu118' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-onnx-cu12' and extra == 'extra-16-inference-models-onnx-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu118') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cpu' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu124') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu118' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu126') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu124' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-cu128') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 
'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu126' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-cu130') or (extra == 'extra-16-inference-models-torch-cu128' and extra == 'extra-16-inference-models-torch-jp6-cu126') or (extra == 'extra-16-inference-models-torch-cu130' and extra == 'extra-16-inference-models-torch-jp6-cu126')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/23/97/b6f296d1e9cc1ec25c7604178b48532fa5901f721bcf1b8d8148b13e5588/yapf-0.43.0.tar.gz", hash = "sha256:00d3aa24bfedff9420b2e0d5d9f5ab6d9d4268e72afbf59bb3fa542781d5218e", size = 254907, upload-time = "2024-11-14T00:11:41.584Z" } wheels = [ @@ -5286,10 +16685,69 @@ wheels = [ ] [[package]] -name = "zipp" -version = "3.23.0" +name = "yarl" +version = "1.23.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/23/6e/beb1beec874a72f23815c1434518bfc4ed2175065173fb138c3705f658d4/yarl-1.23.0.tar.gz", hash = "sha256:53b1ea6ca88ebd4420379c330aea57e258408dd0df9af0992e5de2078dc9f5d5", size = 194676, upload-time = "2026-03-01T22:07:53.373Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/8b/0d/9cc638702f6fc3c7a3685bcc8cf2a9ed7d6206e932a49f5242658047ef51/yarl-1.23.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cff6d44cb13d39db2663a22b22305d10855efa0fa8015ddeacc40bc59b9d8107", size = 123764, upload-time = "2026-03-01T22:04:09.7Z" }, + { url = "https://files.pythonhosted.org/packages/7a/35/5a553687c5793df5429cd1db45909d4f3af7eee90014888c208d086a44f0/yarl-1.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c53f8347cd4200f0d70a48ad059cabaf24f5adc6ba08622a23423bc7efa10d", size = 86282, upload-time = "2026-03-01T22:04:11.892Z" }, + { url = "https://files.pythonhosted.org/packages/68/2e/c5a2234238f8ce37a8312b52801ee74117f576b1539eec8404a480434acc/yarl-1.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a6940a074fb3c48356ed0158a3ca5699c955ee4185b4d7d619be3c327143e05", size = 86053, upload-time = "2026-03-01T22:04:13.292Z" }, + { url = "https://files.pythonhosted.org/packages/74/3f/bbd8ff36fb038622797ffbaf7db314918bb4d76f1cc8a4f9ca7a55fe5195/yarl-1.23.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ed5f69ce7be7902e5c70ea19eb72d20abf7d725ab5d49777d696e32d4fc1811d", size = 99395, upload-time = "2026-03-01T22:04:15.133Z" }, + { url = "https://files.pythonhosted.org/packages/77/04/9516bc4e269d2a3ec9c6779fcdeac51ce5b3a9b0156f06ac7152e5bba864/yarl-1.23.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:389871e65468400d6283c0308e791a640b5ab5c83bcee02a2f51295f95e09748", size = 92143, upload-time = "2026-03-01T22:04:16.829Z" }, + { url = "https://files.pythonhosted.org/packages/c7/63/88802d1f6b1cb1fc67d67a58cd0cf8a1790de4ce7946e434240f1d60ab4a/yarl-1.23.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dda608c88cf709b1d406bdfcd84d8d63cff7c9e577a403c6108ce8ce9dcc8764", size = 107643, upload-time = "2026-03-01T22:04:18.519Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/db/4f9b838f4d8bdd6f0f385aed8bbf21c71ed11a0b9983305c302cbd557815/yarl-1.23.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8c4fe09e0780c6c3bf2b7d4af02ee2394439d11a523bbcf095cf4747c2932007", size = 108700, upload-time = "2026-03-01T22:04:20.373Z" }, + { url = "https://files.pythonhosted.org/packages/50/12/95a1d33f04a79c402664070d43b8b9f72dc18914e135b345b611b0b1f8cc/yarl-1.23.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:31c9921eb8bd12633b41ad27686bbb0b1a2a9b8452bfdf221e34f311e9942ed4", size = 102769, upload-time = "2026-03-01T22:04:23.055Z" }, + { url = "https://files.pythonhosted.org/packages/86/65/91a0285f51321369fd1a8308aa19207520c5f0587772cfc2e03fc2467e90/yarl-1.23.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5f10fd85e4b75967468af655228fbfd212bdf66db1c0d135065ce288982eda26", size = 101114, upload-time = "2026-03-01T22:04:25.031Z" }, + { url = "https://files.pythonhosted.org/packages/58/80/c7c8244fc3e5bc483dc71a09560f43b619fab29301a0f0a8f936e42865c7/yarl-1.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:dbf507e9ef5688bada447a24d68b4b58dd389ba93b7afc065a2ba892bea54769", size = 98883, upload-time = "2026-03-01T22:04:27.281Z" }, + { url = "https://files.pythonhosted.org/packages/86/e7/71ca9cc9ca79c0b7d491216177d1aed559d632947b8ffb0ee60f7d8b23e3/yarl-1.23.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:85e9beda1f591bc73e77ea1c51965c68e98dafd0fec72cdd745f77d727466716", size = 94172, upload-time = "2026-03-01T22:04:28.554Z" }, + { url = "https://files.pythonhosted.org/packages/6a/3f/6c6c8a0fe29c26fb2db2e8d32195bb84ec1bfb8f1d32e7f73b787fcf349b/yarl-1.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0e1fdaa14ef51366d7757b45bde294e95f6c8c049194e793eedb8387c86d5993", size = 107010, upload-time = "2026-03-01T22:04:30.385Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/38/12730c05e5ad40a76374d440ed8b0899729a96c250516d91c620a6e38fc2/yarl-1.23.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:75e3026ab649bf48f9a10c0134512638725b521340293f202a69b567518d94e0", size = 100285, upload-time = "2026-03-01T22:04:31.752Z" }, + { url = "https://files.pythonhosted.org/packages/34/92/6a7be9239f2347234e027284e7a5f74b1140cc86575e7b469d13fba1ebfe/yarl-1.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:80e6d33a3d42a7549b409f199857b4fb54e2103fc44fb87605b6663b7a7ff750", size = 108230, upload-time = "2026-03-01T22:04:33.844Z" }, + { url = "https://files.pythonhosted.org/packages/5e/81/4aebccfa9376bd98b9d8bfad20621a57d3e8cfc5b8631c1fa5f62cdd03f4/yarl-1.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5ec2f42d41ccbd5df0270d7df31618a8ee267bfa50997f5d720ddba86c4a83a6", size = 103008, upload-time = "2026-03-01T22:04:35.856Z" }, + { url = "https://files.pythonhosted.org/packages/38/0f/0b4e3edcec794a86b853b0c6396c0a888d72dfce19b2d88c02ac289fb6c1/yarl-1.23.0-cp310-cp310-win32.whl", hash = "sha256:debe9c4f41c32990771be5c22b56f810659f9ddf3d63f67abfdcaa2c6c9c5c1d", size = 83073, upload-time = "2026-03-01T22:04:38.268Z" }, + { url = "https://files.pythonhosted.org/packages/a0/71/ad95c33da18897e4c636528bbc24a1dd23fe16797de8bc4ec667b8db0ba4/yarl-1.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:ab5f043cb8a2d71c981c09c510da013bc79fd661f5c60139f00dd3c3cc4f2ffb", size = 87328, upload-time = "2026-03-01T22:04:39.558Z" }, + { url = "https://files.pythonhosted.org/packages/e2/14/dfa369523c79bccf9c9c746b0a63eb31f65db9418ac01275f7950962e504/yarl-1.23.0-cp310-cp310-win_arm64.whl", hash = "sha256:263cd4f47159c09b8b685890af949195b51d1aa82ba451c5847ca9bc6413c220", size = 82463, upload-time = "2026-03-01T22:04:41.454Z" }, + { url = "https://files.pythonhosted.org/packages/a2/aa/60da938b8f0997ba3a911263c40d82b6f645a67902a490b46f3355e10fae/yarl-1.23.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:b35d13d549077713e4414f927cdc388d62e543987c572baee613bf82f11a4b99", size = 123641, upload-time = "2026-03-01T22:04:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/24/84/e237607faf4e099dbb8a4f511cfd5efcb5f75918baad200ff7380635631b/yarl-1.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbb0fef01f0c6b38cb0f39b1f78fc90b807e0e3c86a7ff3ce74ad77ce5c7880c", size = 86248, upload-time = "2026-03-01T22:04:44.757Z" }, + { url = "https://files.pythonhosted.org/packages/b2/0d/71ceabc14c146ba8ee3804ca7b3d42b1664c8440439de5214d366fec7d3a/yarl-1.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc52310451fc7c629e13c4e061cbe2dd01684d91f2f8ee2821b083c58bd72432", size = 85988, upload-time = "2026-03-01T22:04:46.365Z" }, + { url = "https://files.pythonhosted.org/packages/8c/6c/4a90d59c572e46b270ca132aca66954f1175abd691f74c1ef4c6711828e2/yarl-1.23.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2c6b50c7b0464165472b56b42d4c76a7b864597007d9c085e8b63e185cf4a7a", size = 100566, upload-time = "2026-03-01T22:04:47.639Z" }, + { url = "https://files.pythonhosted.org/packages/49/fb/c438fb5108047e629f6282a371e6e91cf3f97ee087c4fb748a1f32ceef55/yarl-1.23.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:aafe5dcfda86c8af00386d7781d4c2181b5011b7be3f2add5e99899ea925df05", size = 92079, upload-time = "2026-03-01T22:04:48.925Z" }, + { url = "https://files.pythonhosted.org/packages/d9/13/d269aa1aed3e4f50a5a103f96327210cc5fa5dd2d50882778f13c7a14606/yarl-1.23.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ee33b875f0b390564c1fb7bc528abf18c8ee6073b201c6ae8524aca778e2d83", size = 108741, upload-time = "2026-03-01T22:04:50.838Z" }, + { url = 
"https://files.pythonhosted.org/packages/85/fb/115b16f22c37ea4437d323e472945bea97301c8ec6089868fa560abab590/yarl-1.23.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4c41e021bc6d7affb3364dc1e1e5fa9582b470f283748784bd6ea0558f87f42c", size = 108099, upload-time = "2026-03-01T22:04:52.499Z" }, + { url = "https://files.pythonhosted.org/packages/9a/64/c53487d9f4968045b8afa51aed7ca44f58b2589e772f32745f3744476c82/yarl-1.23.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:99c8a9ed30f4164bc4c14b37a90208836cbf50d4ce2a57c71d0f52c7fb4f7598", size = 102678, upload-time = "2026-03-01T22:04:55.176Z" }, + { url = "https://files.pythonhosted.org/packages/85/59/cd98e556fbb2bf8fab29c1a722f67ad45c5f3447cac798ab85620d1e70af/yarl-1.23.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2af5c81a1f124609d5f33507082fc3f739959d4719b56877ab1ee7e7b3d602b", size = 100803, upload-time = "2026-03-01T22:04:56.588Z" }, + { url = "https://files.pythonhosted.org/packages/9e/c0/b39770b56d4a9f0bb5f77e2f1763cd2d75cc2f6c0131e3b4c360348fcd65/yarl-1.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6b41389c19b07c760c7e427a3462e8ab83c4bb087d127f0e854c706ce1b9215c", size = 100163, upload-time = "2026-03-01T22:04:58.492Z" }, + { url = "https://files.pythonhosted.org/packages/e7/64/6980f99ab00e1f0ff67cb84766c93d595b067eed07439cfccfc8fb28c1a6/yarl-1.23.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:1dc702e42d0684f42d6519c8d581e49c96cefaaab16691f03566d30658ee8788", size = 93859, upload-time = "2026-03-01T22:05:00.268Z" }, + { url = "https://files.pythonhosted.org/packages/38/69/912e6c5e146793e5d4b5fe39ff5b00f4d22463dfd5a162bec565ac757673/yarl-1.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0e40111274f340d32ebcc0a5668d54d2b552a6cca84c9475859d364b380e3222", size = 108202, upload-time = "2026-03-01T22:05:02.273Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/97/35ca6767524687ad64e5f5c31ad54bc76d585585a9fcb40f649e7e82ffed/yarl-1.23.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:4764a6a7588561a9aef92f65bda2c4fb58fe7c675c0883862e6df97559de0bfb", size = 99866, upload-time = "2026-03-01T22:05:03.597Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1c/1a3387ee6d73589f6f2a220ae06f2984f6c20b40c734989b0a44f5987308/yarl-1.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:03214408cfa590df47728b84c679ae4ef00be2428e11630277be0727eba2d7cc", size = 107852, upload-time = "2026-03-01T22:05:04.986Z" }, + { url = "https://files.pythonhosted.org/packages/a4/b8/35c0750fcd5a3f781058bfd954515dd4b1eab45e218cbb85cf11132215f1/yarl-1.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:170e26584b060879e29fac213e4228ef063f39128723807a312e5c7fec28eff2", size = 102919, upload-time = "2026-03-01T22:05:06.397Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1c/9a1979aec4a81896d597bcb2177827f2dbee3f5b7cc48b2d0dadb644b41d/yarl-1.23.0-cp311-cp311-win32.whl", hash = "sha256:51430653db848d258336cfa0244427b17d12db63d42603a55f0d4546f50f25b5", size = 82602, upload-time = "2026-03-01T22:05:08.444Z" }, + { url = "https://files.pythonhosted.org/packages/93/22/b85eca6fa2ad9491af48c973e4c8cf6b103a73dbb271fe3346949449fca0/yarl-1.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf49a3ae946a87083ef3a34c8f677ae4243f5b824bfc4c69672e72b3d6719d46", size = 87461, upload-time = "2026-03-01T22:05:10.145Z" }, + { url = "https://files.pythonhosted.org/packages/93/95/07e3553fe6f113e6864a20bdc53a78113cda3b9ced8784ee52a52c9f80d8/yarl-1.23.0-cp311-cp311-win_arm64.whl", hash = "sha256:b39cb32a6582750b6cc77bfb3c49c0f8760dc18dc96ec9fb55fbb0f04e08b928", size = 82336, upload-time = "2026-03-01T22:05:11.554Z" }, + { url = "https://files.pythonhosted.org/packages/88/8a/94615bc31022f711add374097ad4144d569e95ff3c38d39215d07ac153a0/yarl-1.23.0-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:1932b6b8bba8d0160a9d1078aae5838a66039e8832d41d2992daa9a3a08f7860", size = 124737, upload-time = "2026-03-01T22:05:12.897Z" }, + { url = "https://files.pythonhosted.org/packages/e3/6f/c6554045d59d64052698add01226bc867b52fe4a12373415d7991fdca95d/yarl-1.23.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:411225bae281f114067578891bc75534cfb3d92a3b4dfef7a6ca78ba354e6069", size = 87029, upload-time = "2026-03-01T22:05:14.376Z" }, + { url = "https://files.pythonhosted.org/packages/19/2a/725ecc166d53438bc88f76822ed4b1e3b10756e790bafd7b523fe97c322d/yarl-1.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13a563739ae600a631c36ce096615fe307f131344588b0bc0daec108cdb47b25", size = 86310, upload-time = "2026-03-01T22:05:15.71Z" }, + { url = "https://files.pythonhosted.org/packages/99/30/58260ed98e6ff7f90ba84442c1ddd758c9170d70327394a6227b310cd60f/yarl-1.23.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cbf44c5cb4a7633d078788e1b56387e3d3cf2b8139a3be38040b22d6c3221c8", size = 97587, upload-time = "2026-03-01T22:05:17.384Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/8b08aac08b50682e65759f7f8dde98ae8168f72487e7357a5d684c581ef9/yarl-1.23.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53ad387048f6f09a8969631e4de3f1bf70c50e93545d64af4f751b2498755072", size = 92528, upload-time = "2026-03-01T22:05:18.804Z" }, + { url = "https://files.pythonhosted.org/packages/52/07/0b7179101fe5f8385ec6c6bb5d0cb9f76bd9fb4a769591ab6fb5cdbfc69a/yarl-1.23.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4a59ba56f340334766f3a4442e0efd0af895fae9e2b204741ef885c446b3a1a8", size = 105339, upload-time = "2026-03-01T22:05:20.235Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/8a/36d82869ab5ec829ca8574dfcb92b51286fcfb1e9c7a73659616362dc880/yarl-1.23.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:803a3c3ce4acc62eaf01eaca1208dcf0783025ef27572c3336502b9c232005e7", size = 105061, upload-time = "2026-03-01T22:05:22.268Z" }, + { url = "https://files.pythonhosted.org/packages/66/3e/868e5c3364b6cee19ff3e1a122194fa4ce51def02c61023970442162859e/yarl-1.23.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3d2bff8f37f8d0f96c7ec554d16945050d54462d6e95414babaa18bfafc7f51", size = 100132, upload-time = "2026-03-01T22:05:23.638Z" }, + { url = "https://files.pythonhosted.org/packages/cf/26/9c89acf82f08a52cb52d6d39454f8d18af15f9d386a23795389d1d423823/yarl-1.23.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c75eb09e8d55bceb4367e83496ff8ef2bc7ea6960efb38e978e8073ea59ecb67", size = 99289, upload-time = "2026-03-01T22:05:25.749Z" }, + { url = "https://files.pythonhosted.org/packages/6f/54/5b0db00d2cb056922356104468019c0a132e89c8d3ab67d8ede9f4483d2a/yarl-1.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877b0738624280e34c55680d6054a307aa94f7d52fa0e3034a9cc6e790871da7", size = 96950, upload-time = "2026-03-01T22:05:27.318Z" }, + { url = "https://files.pythonhosted.org/packages/f6/40/10fa93811fd439341fad7e0718a86aca0de9548023bbb403668d6555acab/yarl-1.23.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b5405bb8f0e783a988172993cfc627e4d9d00432d6bbac65a923041edacf997d", size = 93960, upload-time = "2026-03-01T22:05:28.738Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d2/8ae2e6cd77d0805f4526e30ec43b6f9a3dfc542d401ac4990d178e4bf0cf/yarl-1.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1c3a3598a832590c5a3ce56ab5576361b5688c12cb1d39429cf5dba30b510760", size = 104703, upload-time = "2026-03-01T22:05:30.438Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/0c/b3ceacf82c3fe21183ce35fa2acf5320af003d52bc1fcf5915077681142e/yarl-1.23.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8419ebd326430d1cbb7efb5292330a2cf39114e82df5cc3d83c9a0d5ebeaf2f2", size = 98325, upload-time = "2026-03-01T22:05:31.835Z" }, + { url = "https://files.pythonhosted.org/packages/9d/e0/12900edd28bdab91a69bd2554b85ad7b151f64e8b521fe16f9ad2f56477a/yarl-1.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:be61f6fff406ca40e3b1d84716fde398fc08bc63dd96d15f3a14230a0973ed86", size = 105067, upload-time = "2026-03-01T22:05:33.358Z" }, + { url = "https://files.pythonhosted.org/packages/15/61/74bb1182cf79c9bbe4eb6b1f14a57a22d7a0be5e9cedf8e2d5c2086474c3/yarl-1.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ceb13c5c858d01321b5d9bb65e4cf37a92169ea470b70fec6f236b2c9dd7e34", size = 100285, upload-time = "2026-03-01T22:05:35.4Z" }, + { url = "https://files.pythonhosted.org/packages/69/7f/cd5ef733f2550de6241bd8bd8c3febc78158b9d75f197d9c7baa113436af/yarl-1.23.0-cp312-cp312-win32.whl", hash = "sha256:fffc45637bcd6538de8b85f51e3df3223e4ad89bccbfca0481c08c7fc8b7ed7d", size = 82359, upload-time = "2026-03-01T22:05:36.811Z" }, + { url = "https://files.pythonhosted.org/packages/f5/be/25216a49daeeb7af2bec0db22d5e7df08ed1d7c9f65d78b14f3b74fd72fc/yarl-1.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:f69f57305656a4852f2a7203efc661d8c042e6cc67f7acd97d8667fb448a426e", size = 87674, upload-time = "2026-03-01T22:05:38.171Z" }, + { url = "https://files.pythonhosted.org/packages/d2/35/aeab955d6c425b227d5b7247eafb24f2653fedc32f95373a001af5dfeb9e/yarl-1.23.0-cp312-cp312-win_arm64.whl", hash = "sha256:6e87a6e8735b44816e7db0b2fbc9686932df473c826b0d9743148432e10bb9b9", size = 81879, upload-time = "2026-03-01T22:05:40.006Z" }, + { url = "https://files.pythonhosted.org/packages/69/68/c8739671f5699c7dc470580a4f821ef37c32c4cb0b047ce223a7f115757f/yarl-1.23.0-py3-none-any.whl", hash = 
"sha256:a2df6afe50dea8ae15fa34c9f824a3ee958d785fd5d089063d960bae1daa0a3f", size = 48288, upload-time = "2026-03-01T22:07:51.388Z" }, ]